From 02892ef65427bc95827d9d422368b55841dfb974 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Thu, 4 Nov 2021 18:18:56 +0100 Subject: [PATCH 001/101] New module: Samblaster (#954) * add base code from samblaster * added test yml * fixing versions files, should this be the cause of online lint failures * removed tmp files that shouldn't be there * fixing output file name - 1 Co-authored-by: Harshil Patel * fixing output file name - 2 Co-authored-by: Harshil Patel * fixing output file name - 3 Co-authored-by: Harshil Patel * fixing output file name - 4 Co-authored-by: Harshil Patel * fixing output file name - 5 Co-authored-by: Harshil Patel * fixing output file name - 6 * fixed indent * fixed input name and updated test.yml file with new name Co-authored-by: Harshil Patel --- modules/samblaster/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/samblaster/main.nf | 42 +++++++++++++++++ modules/samblaster/meta.yml | 53 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samblaster/main.nf | 13 ++++++ tests/modules/samblaster/test.yml | 7 +++ 6 files changed, 197 insertions(+) create mode 100644 modules/samblaster/functions.nf create mode 100644 modules/samblaster/main.nf create mode 100644 modules/samblaster/meta.yml create mode 100644 tests/modules/samblaster/main.nf create mode 100644 tests/modules/samblaster/test.yml diff --git a/modules/samblaster/functions.nf b/modules/samblaster/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/samblaster/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return 
task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf new file mode 100644 index 00000000..4481d8cd --- /dev/null +++ b/modules/samblaster/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SAMBLASTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" + } else { + container "quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" + } + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + samtools view -h $options.args2 $bam | \\ + samblaster $options.args | \\ + samtools view $options.args3 -Sb - >${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samblaster/meta.yml b/modules/samblaster/meta.yml new file mode 100644 index 00000000..4d51f4fe --- /dev/null +++ b/modules/samblaster/meta.yml @@ -0,0 +1,53 @@ +name: samblaster +description: | + This module combines samtools and samblaster in order to use + samblaster capability to filter or tag SAM files, with the advantage + of maintaining both input and output in BAM format. + Samblaster input must contain a sequence header: for this reason it has been piped + with the "samtools view -h" command. + Additional desired arguments for samtools can be passed using: + options.args2 for the input bam file + options.args3 for the output bam file +keywords: + - sort +tools: + - samblaster: + description: | + samblaster is a fast and flexible program for marking duplicates in read-id grouped paired-end SAM files. + It can also optionally output discordant read pairs and/or split read mappings to separate SAM files, + and/or unmapped/clipped reads to a separate FASTQ file. + By default, samblaster reads SAM input from stdin and writes SAM to stdout. + homepage: None + documentation: https://github.com/GregoryFaust/samblaster + tool_dev_url: https://github.com/GregoryFaust/samblaster + doi: "10.1093/bioinformatics/btu314" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Tagged or filtered BAM file + pattern: "*.bam" + +authors: + - "@lescai" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 008c98dc..41694c2e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1019,6 +1019,10 @@ salmon/quant: - modules/salmon/quant/** - tests/modules/salmon/quant/** +samblaster: + - modules/samblaster/** + - tests/modules/samblaster/** + samtools/ampliconclip: - modules/samtools/ampliconclip/** - tests/modules/samtools/ampliconclip/** diff --git a/tests/modules/samblaster/main.nf b/tests/modules/samblaster/main.nf new file mode 100644 index 00000000..5983d130 --- /dev/null +++ b/tests/modules/samblaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMBLASTER } from '../../../modules/samblaster/main.nf' addParams( options: [args: "-M --addMateTags", suffix:'.processed'] ) + +workflow test_samblaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_bam'], checkIfExists: true) ] + + SAMBLASTER ( input ) +} diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml new file mode 100644 index 00000000..d56d4330 --- /dev/null +++ b/tests/modules/samblaster/test.yml @@ -0,0 +1,7 @@ +- name: samblaster test_samblaster + command: nextflow run tests/modules/samblaster -entry test_samblaster -c tests/config/nextflow.config + tags: + - samblaster + files: + - path: output/samblaster/test.processed.bam + md5sum: 950f23d85f75be1cf872f45c0144bdf4 From 
e560fbbc3ed68e62bc810a50f552394056f81762 Mon Sep 17 00:00:00 2001 From: Lasse Folkersen Date: Fri, 5 Nov 2021 10:25:54 +0100 Subject: [PATCH 002/101] Imputeme (#882) * first commit with imputeme as a module. Extensive re-write of imputeme-code, resulting in release v1.0.7 that is runnable in the next-flow framework. Co-authored-by: EC2 Default User Co-authored-by: Harshil Patel Co-authored-by: Pontus Freyhult --- modules/gunzip/test.txt.gz | Bin 0 -> 47 bytes modules/imputeme/vcftoprs/functions.nf | 78 +++++++++++++++++++++++ modules/imputeme/vcftoprs/main.nf | 60 +++++++++++++++++ modules/imputeme/vcftoprs/meta.yml | 41 ++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/imputeme/vcftoprs/main.nf | 15 +++++ tests/modules/imputeme/vcftoprs/test.yml | 8 +++ 8 files changed, 208 insertions(+) create mode 100644 modules/gunzip/test.txt.gz create mode 100644 modules/imputeme/vcftoprs/functions.nf create mode 100644 modules/imputeme/vcftoprs/main.nf create mode 100644 modules/imputeme/vcftoprs/meta.yml create mode 100644 tests/modules/imputeme/vcftoprs/main.nf create mode 100644 tests/modules/imputeme/vcftoprs/test.yml diff --git a/modules/gunzip/test.txt.gz b/modules/gunzip/test.txt.gz new file mode 100644 index 0000000000000000000000000000000000000000..381417cf643f1b5c547b57b251d71e6d5ce11e16 GIT binary patch literal 47 zcmb2|=HU3lo{`AFT#{N`qE}K;!r-m#=Xv^+o}cIW6JE^0nUR_|V;IhR&VMY%z`y_i DevlAG literal 0 HcmV?d00001 diff --git a/modules/imputeme/vcftoprs/functions.nf b/modules/imputeme/vcftoprs/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/imputeme/vcftoprs/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from 
process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..a3ce7e3c --- /dev/null +++ b/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,60 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName } from './functions' + + +params.options = [:] +options = initOptions(params.options) + +process IMPUTEME_VCFTOPRS { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "YOUR-TOOL-HERE" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img" + } else { + container "biocontainers/imputeme:vv1.0.7_cv1" + } + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + #!/usr/bin/env Rscript + + #Set configuration - either from options.args or from defaults + source("/imputeme/code/impute-me/functions.R") + if(file.exists('$options.args')){ + set_conf("set_from_file",'$options.args') + }else{ + set_conf("set_from_file", "/imputeme/code/impute-me/template/nextflow_default_configuration.R") + } + + #main run + return_message <- prepare_individual_genome('$vcf',overrule_vcf_checks=T) + uniqueID <- sub(' .+\$','',sub('^.+this run is ','',return_message)) + convert_vcfs_to_simple_format(uniqueID=uniqueID) + crawl_for_snps_to_analyze(uniqueIDs=uniqueID) + run_export_script(uniqueIDs=uniqueID) + file.copy(paste0("./",uniqueID,"/",uniqueID,"_data.json"),"output.json") + + #version export. Have to hardcode process name and software name because + #won't run inside an R-block + version_file_path="versions.yml" + f <- file(version_file_path,"w") + writeLines("IMPUTEME_VCFTOPRS:", f) + writeLines(paste0(" imputeme: ", sub("^v","",get_conf("version"))),f) + close(f) + + """ + +} diff --git a/modules/imputeme/vcftoprs/meta.yml b/modules/imputeme/vcftoprs/meta.yml new file mode 100644 index 00000000..8ba5dfe1 --- /dev/null +++ b/modules/imputeme/vcftoprs/meta.yml @@ -0,0 +1,41 @@ +name: imputeme_vcftoprs +description: inputs a VCF-file with whole genome DNA sequencing. Outputs a JSON with polygenic risk scores. +keywords: + - PRS, VCF +tools: + - imputeme: + description: + homepage: www.impute.me + documentation: https://hub.docker.com/repository/docker/lassefolkersen/impute-me + tool_dev_url: https://github.com/lassefolkersen/impute-me + doi: "https://doi.org/10.3389/fgene.2020.00578" + licence: LGPL3 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ vcf:'test', single_end:false ] + - vcf: + type: file + description: vcf file + pattern: "*.{vcf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: json containing Z-scores for all calculated PRS + pattern: "*.{json}" + +authors: + - "@lassefolkersen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 41694c2e..a8fa40df 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -609,6 +609,10 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** +imputeme/vcftoprs: + - modules/imputeme/vcftoprs/** + - tests/modules/imputeme/vcftoprs/** + idr: - modules/idr/** - tests/modules/idr/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 12252542..2d30880f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -119,6 +119,8 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" + syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } diff --git a/tests/modules/imputeme/vcftoprs/main.nf b/tests/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..ff59ca5e 
--- /dev/null +++ b/tests/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' addParams( options: [:] ) + +workflow test_imputeme_vcftoprs { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + IMPUTEME_VCFTOPRS ( input ) +} diff --git a/tests/modules/imputeme/vcftoprs/test.yml b/tests/modules/imputeme/vcftoprs/test.yml new file mode 100644 index 00000000..efb73769 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/test.yml @@ -0,0 +1,8 @@ +- name: imputeme vcftoprs test_imputeme_vcftoprs + command: nextflow run tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c tests/config/nextflow.config + tags: + - imputeme + - imputeme/vcftoprs + files: + - path: output/imputeme/output.json + contains: [ 'type_2_diabetes_32541925":{"GRS":[24.01]' ] From 02932973fa5a1ee4cc4897ad44914a6bb015edf1 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 6 Nov 2021 16:05:04 +0100 Subject: [PATCH 003/101] bwameth: Add touch command to fix problem with bwameth rejecting older files (#1037) See https://github.com/nf-core/methylseq/pull/217 for context where this fix was added into the DSL1 methylseq pipeline. 
--- modules/bwameth/align/main.nf | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 9b1d2b86..e15aba6d 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -32,6 +32,10 @@ process BWAMETH_ALIGN { """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` + # Modify the timestamps so that bwameth doesn't complain about building the index + # See https://github.com/nf-core/methylseq/pull/217 + touch -c -- * + bwameth.py \\ $options.args \\ $read_group \\ From 22aa168622d6fa8c8da5a845979ddd3e8c619005 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sat, 6 Nov 2021 09:34:39 -0600 Subject: [PATCH 004/101] add scoary module (#1034) Co-authored-by: Gregor Sturm --- modules/scoary/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/scoary/main.nf | 45 +++++++++++++++++++ modules/scoary/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/scoary/main.nf | 15 +++++++ tests/modules/scoary/test.yml | 9 ++++ 6 files changed, 202 insertions(+) create mode 100644 modules/scoary/functions.nf create mode 100644 modules/scoary/main.nf create mode 100644 modules/scoary/meta.yml create mode 100644 tests/modules/scoary/main.nf create mode 100644 tests/modules/scoary/test.yml diff --git a/modules/scoary/functions.nf b/modules/scoary/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/scoary/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf new file mode 100644 index 00000000..5720b4e5 --- /dev/null +++ b/modules/scoary/main.nf @@ -0,0 +1,45 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process SCOARY { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::scoary=1.6.16" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2" + } else { + container "quay.io/biocontainers/scoary:1.6.16--py_2" + } + + input: + tuple val(meta), path(genes), path(traits) + path(tree) + + output: + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def newick_tree = tree ? 
"-n ${tree}" : "" + """ + scoary \\ + $options.args \\ + --no-time \\ + --threads $task.cpus \\ + --traits $traits \\ + --genes $genes + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( scoary --version 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/scoary/meta.yml b/modules/scoary/meta.yml new file mode 100644 index 00000000..e8e8515e --- /dev/null +++ b/modules/scoary/meta.yml @@ -0,0 +1,51 @@ +name: scoary +description: Use pangenome outputs for GWAS +keywords: + - gwas + - pangenome + - prokaryote +tools: + - scoary: + description: Microbial pan-GWAS using the output from Roary + homepage: https://github.com/AdmiralenOla/Scoary + documentation: https://github.com/AdmiralenOla/Scoary + tool_dev_url: https://github.com/AdmiralenOla/Scoary + doi: "10.1186/s13059-016-1108-8" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - genes: + type: file + description: A presence/absence matrix of genes in the pan-genome + pattern: "*.csv" + - traits: + type: file + description: A CSV file containing trait information per-sample + pattern: "*.csv" + - tree: + type: file + description: A Newick formtted tree for phylogenetic analyses + pattern: "*.{dnd,nwk,treefile}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - csv: + type: file + description: Gene associations in a CSV file per trait + pattern: "*.csv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a8fa40df..6d9d0d12 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1083,6 +1083,10 @@ samtools/view: - modules/samtools/view/** - tests/modules/samtools/view/** +scoary: + - modules/scoary/** + - tests/modules/scoary/** + seacr/callpeak: - modules/seacr/callpeak/** - tests/modules/seacr/callpeak/** diff --git a/tests/modules/scoary/main.nf b/tests/modules/scoary/main.nf new file mode 100644 index 00000000..ec3f6e9f --- /dev/null +++ b/tests/modules/scoary/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SCOARY } from '../../../modules/scoary/main.nf' addParams( options: [:] ) + +workflow test_scoary { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Gene_presence_absence.csv", checkIfExists: true), + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Tetracycline_resistance.csv", checkIfExists: true) ] + + tree = [] + SCOARY ( input, tree) +} diff --git a/tests/modules/scoary/test.yml b/tests/modules/scoary/test.yml new file mode 100644 index 00000000..c5269293 --- /dev/null +++ b/tests/modules/scoary/test.yml @@ -0,0 +1,9 @@ +- name: scoary test_scoary + command: nextflow run tests/modules/scoary -entry test_scoary -c tests/config/nextflow.config + tags: + - scoary + files: + - path: output/scoary/Bogus_trait.results.csv + md5sum: 9550c692bbe6ff0ac844357bfabb809b + - path: output/scoary/Tetracycline_resistance.results.csv + md5sum: a87740818ab4de69a758fc75d7b879dd From 729d9ae450e166938435cd9da9d95bbe6ad9062c Mon Sep 17 00:00:00 2001 From: 
"Robert A. Petit III" Date: Sat, 6 Nov 2021 09:44:26 -0600 Subject: [PATCH 005/101] add meningotype module (#1022) Co-authored-by: Gregor Sturm --- modules/meningotype/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/meningotype/main.nf | 41 ++++++++++++++++ modules/meningotype/meta.yml | 43 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/meningotype/main.nf | 13 +++++ tests/modules/meningotype/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/meningotype/functions.nf create mode 100644 modules/meningotype/main.nf create mode 100644 modules/meningotype/meta.yml create mode 100644 tests/modules/meningotype/main.nf create mode 100644 tests/modules/meningotype/test.yml diff --git a/modules/meningotype/functions.nf b/modules/meningotype/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/meningotype/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } 
// Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf new file mode 100644 index 00000000..4e779e8c --- /dev/null +++ b/modules/meningotype/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MENINGOTYPE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + 
conda (params.enable_conda ? "bioconda::meningotype=0.8.5" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + meningotype \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) + END_VERSIONS + """ +} diff --git a/modules/meningotype/meta.yml b/modules/meningotype/meta.yml new file mode 100644 index 00000000..07c2ff5e --- /dev/null +++ b/modules/meningotype/meta.yml @@ -0,0 +1,43 @@ +name: meningotype +description: Serotyping of Neisseria meningitidis assemblies +keywords: + - fasta + - Neisseria meningitidis + - serotype +tools: + - meningotype: + description: In silico serotyping and finetyping (porA and fetA) of Neisseria meningitidis + homepage: https://github.com/MDU-PHL/meningotype + documentation: https://github.com/MDU-PHL/meningotype + tool_dev_url: https://github.com/MDU-PHL/meningotype + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6d9d0d12..37da142b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -766,6 +766,10 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +meningotype: + - modules/meningotype/** + - tests/modules/meningotype/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/meningotype/main.nf b/tests/modules/meningotype/main.nf new file mode 100644 index 00000000..d660ec72 --- /dev/null +++ b/tests/modules/meningotype/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' addParams( options: [:] ) + +workflow test_meningotype { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + MENINGOTYPE ( input ) +} diff --git a/tests/modules/meningotype/test.yml b/tests/modules/meningotype/test.yml new file mode 100644 index 00000000..c61e78a6 --- /dev/null +++ b/tests/modules/meningotype/test.yml @@ -0,0 +1,7 @@ +- name: meningotype test_meningotype + command: nextflow run tests/modules/meningotype -entry test_meningotype -c tests/config/nextflow.config + tags: + - meningotype + files: + - path: output/meningotype/test.tsv + md5sum: 25651bccb3d1c64cefcb7946fda30a6c From 316aedaaa626819c1b6eff26bb4f76383b333453 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Sat, 6 Nov 2021 15:51:15 +0000 Subject: [PATCH 006/101] bug fixes: genomicsdbimport (#1035) * saving changes to checkout * saving to sort out other branch * removed yml tracking of files 
that cant be tracked due to directory name changing between runs * test data added, ready for pr * fix eol linting error * Update modules/gatk4/genomicsdbimport/main.nf Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> * merging with master * update push to show progress * tests now working untar able to pass data to genomicsdbimport * commit to checkout * tests updated, module reworked to simplify and emit updated gendb * Apply suggestions from code review Co-authored-by: Harshil Patel * update meta.yml Priority of input options changed, updated to reflect this * Update test.yml name prefix changed in main script, test.yml updated to reflect this * fix tests due to review changes * bug fixes, multicalling samples and gendb emissions now fixed * Update pytest_modules.yml * Update meta.yml Co-authored-by: GCJMackenzie Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/gatk4/genomicsdbimport/main.nf | 10 +++++----- modules/gatk4/genomicsdbimport/meta.yml | 6 +++++- tests/modules/gatk4/genomicsdbimport/main.nf | 6 +++--- tests/modules/gatk4/genomicsdbimport/test.yml | 18 +++++++++--------- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index aa4fceb0..78c6b81f 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -25,18 +25,18 @@ process GATK4_GENOMICSDBIMPORT { val input_map output: - tuple val(meta), path("*_genomicsdb") , optional:true, emit: genomicsdb + tuple val(meta), path("${prefix}") , optional:true, emit: genomicsdb tuple val(meta), path("$updated_db") , optional:true, emit: updatedb tuple val(meta), path("*.interval_list"), optional:true, emit: intervallist path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" // settings for running default create gendb mode - def inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V')}" - def dir_command = "--genomicsdb-workspace-path ${prefix}" - def intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " + inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" + dir_command = "--genomicsdb-workspace-path ${prefix}" + intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " // settings changed for running get intervals list mode if run_intlist is true if (run_intlist) { diff --git a/modules/gatk4/genomicsdbimport/meta.yml b/modules/gatk4/genomicsdbimport/meta.yml index f7a32e7e..af626cb1 100644 --- a/modules/gatk4/genomicsdbimport/meta.yml +++ b/modules/gatk4/genomicsdbimport/meta.yml @@ -66,7 +66,11 @@ output: - genomicsdb: type: directory description: Directory containing the files that compose the genomicsdb workspace, this is only output for create mode, as update changes an existing db - pattern: "*_genomicsdb" + pattern: "*/$prefix" + - updatedb: + type: directory + description: Directory containing the files that compose the updated genomicsdb workspace, this is only output for update mode, and should be the same path as the input wspace. + pattern: "same/path/as/wspace" - intervallist: type: file description: File containing the intervals used to generate the genomicsdb, only created by get intervals mode. 
diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index ef67b04a..aff3973d 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -7,7 +7,7 @@ include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimp workflow test_gatk4_genomicsdbimport_create_genomicsdb { - input = [ [ id:'test_genomicsdb'], // meta map + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) , file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) , @@ -26,7 +26,7 @@ workflow test_gatk4_genomicsdbimport_get_intervalslist { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map [] , [] , [] , @@ -45,7 +45,7 @@ workflow test_gatk4_genomicsdbimport_update_genomicsdb { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'] , checkIfExists: true) , file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz_tbi'] , checkIfExists: true) , [] , diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 68f5ae7a..5fe2b49b 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -4,19 +4,19 @@ - gatk4/genomicsdbimport - gatk4 files: - - path: output/gatk4/test_genomicsdb/__tiledb_workspace.tdb + - path: output/gatk4/test/__tiledb_workspace.tdb md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/gatk4/test_genomicsdb/callset.json + - path: output/gatk4/test/callset.json md5sum: a7d07d1c86449bbb1091ff29368da07a - - 
path: output/gatk4/test_genomicsdb/chr22$1$40001/.__consolidation_lock + - path: output/gatk4/test/chr22$1$40001/.__consolidation_lock md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/gatk4/test_genomicsdb/chr22$1$40001/__array_schema.tdb - - path: output/gatk4/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + - path: output/gatk4/test/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json md5sum: 2502f79658bc000578ebcfddfc1194c0 - - path: output/gatk4/test_genomicsdb/vcfheader.vcf - contains: + - path: output/gatk4/test/vcfheader.vcf + contains: - "FORMAT= Date: Mon, 8 Nov 2021 12:43:15 -0700 Subject: [PATCH 007/101] add emmtyper module (#1028) Co-authored-by: Gregor Sturm --- modules/emmtyper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/emmtyper/main.nf | 41 +++++++++++++++++ modules/emmtyper/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/emmtyper/main.nf | 13 ++++++ tests/modules/emmtyper/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/emmtyper/functions.nf create mode 100644 modules/emmtyper/main.nf create mode 100644 modules/emmtyper/meta.yml create mode 100644 tests/modules/emmtyper/main.nf create mode 100644 tests/modules/emmtyper/test.yml diff --git a/modules/emmtyper/functions.nf b/modules/emmtyper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/emmtyper/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf new file mode 100644 index 00000000..74624c1f --- /dev/null +++ b/modules/emmtyper/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process EMMTYPER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::emmtyper=0.2.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0" + } else { + container "quay.io/biocontainers/emmtyper:0.2.0--py_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + emmtyper \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) + END_VERSIONS + """ +} diff --git a/modules/emmtyper/meta.yml b/modules/emmtyper/meta.yml new file mode 100644 index 00000000..019a8e4c --- /dev/null +++ b/modules/emmtyper/meta.yml @@ -0,0 +1,43 @@ +name: emmtyper +description: EMM typing of Streptococcus pyogenes assemblies +keywords: + - fasta + - Streptococcus pyogenes + - typing +tools: + - emmtyper: + description: Streptococcus pyogenes in silico EMM typer + homepage: https://github.com/MDU-PHL/emmtyper + documentation: https://github.com/MDU-PHL/emmtyper + tool_dev_url: https://github.com/MDU-PHL/emmtyper + doi: "" + licence: ['GNU General Public v3 (GPL v3)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 37da142b..146eba06 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -362,6 +362,10 @@ dshbio/splitgff3: - modules/dshbio/splitgff3/** - tests/modules/dshbio/splitgff3/** +emmtyper: + - modules/emmtyper/** + - tests/modules/emmtyper/** + ensemblvep: - modules/ensemblvep/** - tests/modules/ensemblvep/** diff --git a/tests/modules/emmtyper/main.nf b/tests/modules/emmtyper/main.nf new file mode 100644 index 00000000..9f2181a8 --- /dev/null +++ b/tests/modules/emmtyper/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { EMMTYPER } from '../../../modules/emmtyper/main.nf' addParams( options: [:] ) + +workflow test_emmtyper { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + EMMTYPER ( input ) +} diff --git a/tests/modules/emmtyper/test.yml b/tests/modules/emmtyper/test.yml new file mode 100644 index 00000000..da59e0f1 --- /dev/null +++ b/tests/modules/emmtyper/test.yml @@ -0,0 +1,7 @@ +- name: emmtyper test_emmtyper + command: nextflow run tests/modules/emmtyper -entry test_emmtyper -c tests/config/nextflow.config + tags: + - emmtyper + files: + - path: output/emmtyper/test.tsv + md5sum: c727ba859adec9ca8ff0e091ecf79c62 From c10f9eb817b8aa6c2f1ef43c08a18e54137580fc Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 8 Nov 2021 12:52:07 -0700 Subject: [PATCH 008/101] add lissero module (#1026) * add lissero module * Update test.yml Co-authored-by: Gregor Sturm --- modules/lissero/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/lissero/main.nf | 41 +++++++++++++++++ modules/lissero/meta.yml | 44 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/lissero/main.nf | 13 ++++++ tests/modules/lissero/test.yml | 7 +++ 6 files changed, 187 insertions(+) create mode 100644 modules/lissero/functions.nf create mode 100644 modules/lissero/main.nf create mode 100644 modules/lissero/meta.yml create mode 100644 tests/modules/lissero/main.nf create mode 100644 tests/modules/lissero/test.yml diff --git a/modules/lissero/functions.nf b/modules/lissero/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/lissero/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty 
entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf new file mode 100644 index 00000000..ff863aaa --- /dev/null +++ b/modules/lissero/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process LISSERO { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::lissero=0.4.9" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0" + } else { + container "quay.io/biocontainers/lissero:0.4.9--py_0" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + lissero \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) + END_VERSIONS + """ +} diff --git a/modules/lissero/meta.yml b/modules/lissero/meta.yml new file mode 100644 index 00000000..d4fb38df --- /dev/null +++ b/modules/lissero/meta.yml @@ -0,0 +1,44 @@ +name: lissero +description: Serogrouping Listeria monocytogenes assemblies +keywords: + - fasta + - Listeria monocytogenes + - serogroup +tools: + - lissero: + description: In silico serotyping of Listeria monocytogenes + homepage: https://github.com/MDU-PHL/LisSero/blob/master/README.md + documentation: https://github.com/MDU-PHL/LisSero/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/lissero + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" + diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 146eba06..6fd80c59 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -714,6 +714,10 @@ lima: - modules/lima/** - tests/modules/lima/** +lissero: + - modules/lissero/** + - tests/modules/lissero/** + lofreq/call: - modules/lofreq/call/** - tests/modules/lofreq/call/** diff --git a/tests/modules/lissero/main.nf b/tests/modules/lissero/main.nf new file mode 100644 index 00000000..e653bd76 --- /dev/null +++ b/tests/modules/lissero/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LISSERO } from '../../../modules/lissero/main.nf' addParams( options: [:] ) + +workflow test_lissero { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/MDU-PHL/LisSero/raw/master/tests/test_seq/NC_002973.fna", checkIfExists: true) ] + + LISSERO ( input ) +} diff --git a/tests/modules/lissero/test.yml b/tests/modules/lissero/test.yml new file mode 100644 index 00000000..19e79623 --- /dev/null +++ b/tests/modules/lissero/test.yml @@ -0,0 +1,7 @@ +- name: lissero test_lissero + command: nextflow run tests/modules/lissero -entry test_lissero -c tests/config/nextflow.config + tags: + - lissero + files: + - path: output/lissero/test.tsv + contains: ['ID', 'SEROTYPE', 'FULL'] From e0ada7d219e6e3a71b47a8579d1a822a730ed38d Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Mon, 8 Nov 2021 21:08:26 +0100 Subject: [PATCH 009/101] New module: `metabat2` (#875) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf 
Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. 
Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- .../jgisummarizebamcontigdepths/functions.nf | 78 +++++++++++++++++++ .../jgisummarizebamcontigdepths/main.nf | 44 +++++++++++ .../jgisummarizebamcontigdepths/meta.yml | 50 ++++++++++++ modules/metabat2/metabat2/functions.nf | 78 +++++++++++++++++++ modules/metabat2/metabat2/main.nf | 53 +++++++++++++ modules/metabat2/metabat2/meta.yml | 56 +++++++++++++ tests/config/pytest_modules.yml | 9 +++ .../jgisummarizebamcontigdepths/main.nf | 14 ++++ .../jgisummarizebamcontigdepths/test.yml | 8 ++ tests/modules/metabat2/metabat2/main.nf | 35 +++++++++ tests/modules/metabat2/metabat2/test.yml | 23 ++++++ 11 files changed, 448 insertions(+) create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/functions.nf create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/main.nf create mode 100644 modules/metabat2/jgisummarizebamcontigdepths/meta.yml create mode 100644 modules/metabat2/metabat2/functions.nf create mode 100644 modules/metabat2/metabat2/main.nf create mode 100644 modules/metabat2/metabat2/meta.yml create mode 100644 tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf create mode 100644 tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml create mode 100644 tests/modules/metabat2/metabat2/main.nf create mode 100644 tests/modules/metabat2/metabat2/test.yml diff --git a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool 
from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..1860ae16 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,44 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" + } else { + container "quay.io/biocontainers/metabat2:2.15--h986a166_1" + } + + input: + tuple val(meta), path(bam), path(bai) + + output: + tuple val(meta), path("*.txt.gz"), emit: depth + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + export OMP_NUM_THREADS=$task.cpus + + jgi_summarize_bam_contig_depths \\ + --outputDepth ${prefix}.txt \\ + $options.args \\ + $bam + + bgzip --threads $task.cpus ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/meta.yml b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml new file mode 100644 index 00000000..351a4701 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml @@ -0,0 +1,50 @@ +name: metabat2_jgisummarizebamcontigdepths +description: Depth computation per contig step of metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Sorted BAM file of reads aligned on the assembled contigs + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bam.bai" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - depth: + type: file + description: Text file listing the coverage per contig + pattern: ".txt.gz" + +authors: + - "@maxibor" diff --git a/modules/metabat2/metabat2/functions.nf b/modules/metabat2/metabat2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/metabat2/metabat2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..589e268c --- /dev/null +++ b/modules/metabat2/metabat2/main.nf @@ -0,0 +1,53 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process METABAT2_METABAT2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::metabat2=2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" + } else { + container "quay.io/biocontainers/metabat2:2.15--h986a166_1" + } + + input: + tuple val(meta), path(fasta), path(depth) + + output: + tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta + tuple val(meta), path("*.tsv.gz"), optional:true , emit: membership + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def decompress_depth = depth ? "gzip -d -f $depth" : "" + def depth_file = depth ? "-a ${depth.baseName}" : "" + """ + $decompress_depth + + metabat2 \\ + $options.args \\ + -i $fasta \\ + $depth_file \\ + -t $task.cpus \\ + --saveCls \\ + -o metabat2/${prefix} + + mv metabat2/${prefix} ${prefix}.tsv + mv metabat2 bins + bgzip --threads $task.cpus ${prefix}.tsv + bgzip --threads $task.cpus bins/*.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/metabat2/meta.yml b/modules/metabat2/metabat2/meta.yml new file mode 100644 index 00000000..a7f3a7ff --- /dev/null +++ b/modules/metabat2/metabat2/meta.yml @@ -0,0 +1,56 @@ +name: metabat2_metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta file of the assembled contigs + pattern: "*.{fa,fas,fasta,fna,fa.gz,fas.gz,fasta.gz,fna.gz}" + - depth: + type: file + description: | + Optional text file listing the coverage per contig pre-generated + by metabat2_jgisummarizebamcontigdepths + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fasta: + type: file + description: Bins created from assembled contigs in fasta file + pattern: "*.fa.gz" + - membership: + type: file + description: cluster memberships as a matrix format. + pattern: "*.tsv.gz" + + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6fd80c59..4edf5ec6 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -774,10 +774,19 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +metabat2/jgisummarizebamcontigdepths: + - modules/metabat2/jgisummarizebamcontigdepths/** + - tests/modules/metabat2/jgisummarizebamcontigdepths/** + +metabat2/metabat2: + - modules/metabat2/metabat2/** + - tests/modules/metabat2/metabat2/** + meningotype: - modules/meningotype/** - tests/modules/meningotype/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..2cfc2e2c --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) + +workflow test_metabat2_jgisummarizebamcontigdepths { 
+ + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input ) +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml new file mode 100644 index 00000000..d318c6d4 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml @@ -0,0 +1,8 @@ +- name: metabat2 jgisummarizebamcontigdepths test_metabat2_jgisummarizebamcontigdepths + command: nextflow run tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c tests/config/nextflow.config + tags: + - metabat2/jgisummarizebamcontigdepths + - metabat2 + files: + - path: output/metabat2/test.txt.gz + md5sum: 8f735aa408d6c90e5a0310e06ace7a9a diff --git a/tests/modules/metabat2/metabat2/main.nf b/tests/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..3d01f194 --- /dev/null +++ b/tests/modules/metabat2/metabat2/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) + +workflow test_metabat2_no_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it 
-> [[ id:'test', single_end:false ], it, []] } + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} + +workflow test_metabat2_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml new file mode 100644 index 00000000..7b3435b7 --- /dev/null +++ b/tests/modules/metabat2/metabat2/test.yml @@ -0,0 +1,23 @@ +- name: metabat2 metabat2 test_metabat2_no_depth + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config + tags: + - metabat2/metabat2 + - metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + md5sum: ea77e8c4426d2337419905b57f1ec335 + +- name: metabat2 metabat2 test_metabat2_depth + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config + tags: + - metabat2/metabat2 + - metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + md5sum: ea77e8c4426d2337419905b57f1ec335 + - path: output/metabat2/test.txt.gz + md5sum: 8f735aa408d6c90e5a0310e06ace7a9a From 9573cb1bec52de3d50de3f277c28366f3c5795fe Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Tue, 9 Nov 2021 
10:16:43 +0000 Subject: [PATCH 010/101] Add panel of normals subworkflow (#1044) * commiting changes to switch branch * commit to setup remote branch * first draft of the sompon workflow * keep branch in line with gendb bugfixing * Update test.yml * tidy up main.nf * fixed md5sum Co-authored-by: GCJMackenzie --- .../nf-core/gatk_create_som_pon/main.nf | 58 ++++++++++++++ .../nf-core/gatk_create_som_pon/meta.yml | 75 +++++++++++++++++++ .../gatk_create_som_pon/nextflow.config | 3 + tests/config/pytest_subworkflows.yml | 5 ++ .../nf-core/gatk_create_som_pon/main.nf | 26 +++++++ .../nf-core/gatk_create_som_pon/test.yml | 38 ++++++++++ 6 files changed, 205 insertions(+) create mode 100644 subworkflows/nf-core/gatk_create_som_pon/main.nf create mode 100644 subworkflows/nf-core/gatk_create_som_pon/meta.yml create mode 100644 subworkflows/nf-core/gatk_create_som_pon/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_create_som_pon/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_create_som_pon/test.yml diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..9b190584 --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,58 @@ +// +// Run GATK mutect2, genomicsdbimport and createsomaticpanelofnormals +// + +params.mutect2_options = [args: '--max-mnp-distance 0'] +params.gendbimport_options = [:] +params.createsompon_options = [:] + +include { GATK4_MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GENOMICSDBIMPORT } from '../../../modules/gatk4/genomicsdbimport/main' addParams( options: params.gendbimport_options ) +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../modules/gatk4/createsomaticpanelofnormals/main' addParams( options: params.createsompon_options ) + +workflow GATK_CREATE_SOM_PON { + take: + ch_mutect2_in // channel: [ val(meta), [ input ], [ 
input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fastaidx // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + pon_name // channel: name for panel of normals + interval_file // channel: /path/to/interval/file + + main: + ch_versions = Channel.empty() + input = channel.from(ch_mutect2_in) + // + //Perform variant calling for each sample using mutect2 module in panel of normals mode. + // + GATK4_MUTECT2 ( input , false , true, false , [] , fasta , fastaidx , dict , [], [] , [] , [] ) + ch_versions = ch_versions.mix(GATK4_MUTECT2.out.versions.first()) + + // + //Convert all sample vcfs into a genomicsdb workspace using genomicsdbimport. + // + ch_vcf = GATK4_MUTECT2.out.vcf.collect{it[1]}.toList() + ch_index = GATK4_MUTECT2.out.tbi.collect{it[1]}.toList() + gendb_input = Channel.of([[ id:pon_name ]]).combine(ch_vcf).combine(ch_index).combine([interval_file]).combine(['']).combine([dict]) + GATK4_GENOMICSDBIMPORT ( gendb_input, false, false, false ) + ch_versions = ch_versions.mix(GATK4_GENOMICSDBIMPORT.out.versions.first()) + + // + //Panel of normals made from genomicsdb workspace using createsomaticpanelofnormals. 
+ // + GATK4_GENOMICSDBIMPORT.out.genomicsdb.view() + GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fastaidx, dict ) + ch_versions = ch_versions.mix(GATK4_CREATESOMATICPANELOFNORMALS.out.versions.first()) + + emit: + mutect2_vcf = GATK4_MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = GATK4_MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = GATK4_MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + genomicsdb = GATK4_GENOMICSDBIMPORT.out.genomicsdb // channel: [ val(meta), [ genomicsdb ] ] + + pon_vcf = GATK4_CREATESOMATICPANELOFNORMALS.out.vcf // channel: [ val(meta), [ vcf.gz ] ] + pon_index = GATK4_CREATESOMATICPANELOFNORMALS.out.tbi // channel: [ val(meta), [ tbi ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_create_som_pon/meta.yml b/subworkflows/nf-core/gatk_create_som_pon/meta.yml new file mode 100644 index 00000000..bc02b885 --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/meta.yml @@ -0,0 +1,75 @@ +name: gatk_create_som_pon +description: Perform variant calling on a set of normal samples using mutect2 panel of normals mode. Group them into a genomicsdbworkspace using genomicsdbimport, then use this to create a panel of normals using createsomaticpanelofnormals. +keywords: + - gatk4 + - mutect2 + - genomicsdbimport + - createsomaticpanelofnormals + - variant_calling + - genomicsdb_workspace + - panel_of_normals +modules: + - gatk4/mutect2 + - gatk4/genomicsdbimport + - gatk4/createsomaticpanelofnormals +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: list + description: list of BAM files, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list of BAM file indexes, also able to take CRAM indexes as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fastaidx: + type: file + description: Index of reference fasta file + pattern: "fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - pon_name: + type: String + Description: name to be used for the genomicsdb workspace and panel of normals as meta_id has the individual sample names and a name for the combined files is reuired here. + pattern: "example_name" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: list + description: List of compressed vcf files to be used to make the gendb workspace + pattern: "[ *.vcf.gz ]" + - mutect2_index: + type: list + description: List of indexes of mutect2_vcf files + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: list + description: List of stats files that pair with mutect2_vcf files + pattern: "[ *vcf.gz.stats ]" + - genomicsdb: + type: directory + description: Directory containing the files that compose the genomicsdb workspace. 
+ pattern: "path/name_of_workspace" + - pon_vcf: + type: file + description: Panel of normal as compressed vcf file + pattern: "*.vcf.gz" + - pon_index: + type: file + description: Index of pon_vcf file + pattern: "*vcf.gz.tbi" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_create_som_pon/nextflow.config b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config new file mode 100644 index 00000000..6f560c9e --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config @@ -0,0 +1,3 @@ +params.mutect2_options = [:] +params.gendbimport_options = [:] +params.createsompon_options = [:] diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml index 84919be8..4f9c5514 100644 --- a/tests/config/pytest_subworkflows.yml +++ b/tests/config/pytest_subworkflows.yml @@ -14,3 +14,8 @@ subworkflows/sra_fastq: - subworkflows/nf-core/sra_fastq/** - tests/subworkflows/nf-core/sra_fastq/** +subworkflows/gatk_create_som_pon: + - subworkflows/nf-core/gatk_create_som_pon/** + - tests/subworkflows/nf-core/gatk_create_som_pon/** + + \ No newline at end of file diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..d484ac2f --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_CREATE_SOM_PON } from '../../../../subworkflows/nf-core/gatk_create_som_pon/main' addParams( [:] ) + +workflow test_gatk_create_som_pon { + ch_mutect2_in = [ + [[ id:'test1' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ], + [[ id:'test2' ], // meta map + 
[file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + pon_name = "test_panel" + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fastaidx, dict, pon_name, interval_file ) + +} diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml new file mode 100644 index 00000000..eae34dd4 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -0,0 +1,38 @@ +- name: gatk_create_som_pon + command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config + tags: + - subworkflows/gatk_create_som_pon + # Modules + - gatk4 + - gatk4 + - gatk4/genomicsdbimport + - gatk4/createsomaticpanelofnormals + files: + # gatk4 mutect2 + - path: output/gatk4/test1.vcf.gz + - path: output/gatk4/test1.vcf.gz.stats + md5sum: 4f77301a125913170b8e9e7828b4ca3f + - path: output/gatk4/test1.vcf.gz.tbi + - path: output/gatk4/test2.vcf.gz + - path: output/gatk4/test2.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 + - path: output/gatk4/test2.vcf.gz.tbi + # gatk4 genomicsdbimport + - path: output/gatk4/test_panel/__tiledb_workspace.tdb + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/callset.json + md5sum: 2ab411773b7267de61f8c04939de2a99 + - path: output/gatk4/test_panel/chr22$1$40001/.__consolidation_lock + 
md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test_panel/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + md5sum: 2502f79658bc000578ebcfddfc1194c0 + - path: output/gatk4/test_panel/vcfheader.vcf + contains: + - "FORMAT= Date: Tue, 9 Nov 2021 11:08:59 +0000 Subject: [PATCH 011/101] bugfix: panel of normals subworkflow: remove md5sum on a gzipped file (#1045) * commiting changes to switch branch * commit to setup remote branch * first draft of the sompon workflow * keep branch in line with gendb bugfixing * Update test.yml * tidy up main.nf * fixed md5sum * Update test.yml removed md5sum from test_panel.vcf.gz.tbi * Update test.yml * remove md5sum from gzipped test_panel.vcf.gz.tbi Co-authored-by: GCJMackenzie --- tests/subworkflows/nf-core/gatk_create_som_pon/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index eae34dd4..7c9e7ac0 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -35,4 +35,3 @@ # gatk4 createsomaticpanelofnormals - path: output/gatk4/test_panel.vcf.gz - path: output/gatk4/test_panel.vcf.gz.tbi - md5sum: d7e2524ba4bf7538dbee3e225a74b0da From 6d3d8306e1ce8a096ef33f2ecfbc4d7b2ec91687 Mon Sep 17 00:00:00 2001 From: anan220606 <81744003+Darcy220606@users.noreply.github.com> Date: Tue, 9 Nov 2021 14:05:23 +0100 Subject: [PATCH 012/101] Add new module Mapdamage2 (#975) * Fitst attempt at mapdamage2 * Add new module mapdamage2 * Removed __pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc * Modify main.nf and meta.yml * Modify main.nf and meta.yml * Modify main.nf and meta.yml * Modify meta.yml * Update pytest_modules.yml * Apply suggestions from code review Co-authored-by: James A. 
Fellows Yates * edit the meta.yml and main.nf after reviews * Update meta.yml * Update meta.yml Co-authored-by: AIbrahim Co-authored-by: James A. Fellows Yates --- modules/mapdamage2/functions.nf | 78 ++++++++++++++++++++ modules/mapdamage2/main.nf | 58 +++++++++++++++ modules/mapdamage2/meta.yml | 114 ++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mapdamage2/main.nf | 15 ++++ tests/modules/mapdamage2/test.yml | 25 +++++++ 6 files changed, 294 insertions(+) create mode 100644 modules/mapdamage2/functions.nf create mode 100644 modules/mapdamage2/main.nf create mode 100644 modules/mapdamage2/meta.yml create mode 100644 tests/modules/mapdamage2/main.nf create mode 100644 tests/modules/mapdamage2/test.yml diff --git a/modules/mapdamage2/functions.nf b/modules/mapdamage2/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mapdamage2/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf new file mode 100644 index 00000000..e252e27c --- /dev/null +++ b/modules/mapdamage2/main.nf @@ -0,0 +1,58 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MAPDAMAGE2 { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda 
(params.enable_conda ? "bioconda::mapdamage2=2.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0" + } else { + container "quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0" + } + + input: + tuple val(meta), path(bam) + path(fasta) + + output: + tuple val(meta), path("results_*/Runtime_log.txt") ,emit: runtime_log + tuple val(meta), path("results_*/Fragmisincorporation_plot.pdf"), optional: true ,emit: fragmisincorporation_plot + tuple val(meta), path("results_*/Length_plot.pdf"), optional: true ,emit: length_plot + tuple val(meta), path("results_*/misincorporation.txt"), optional: true ,emit: misincorporation + tuple val(meta), path("results_*/lgdistribution.txt"), optional: true ,emit: lgdistribution + tuple val(meta), path("results_*/dnacomp.txt"), optional: true ,emit: dnacomp + tuple val(meta), path("results_*/Stats_out_MCMC_hist.pdf"), optional: true ,emit: stats_out_mcmc_hist + tuple val(meta), path("results_*/Stats_out_MCMC_iter.csv"), optional: true ,emit: stats_out_mcmc_iter + tuple val(meta), path("results_*/Stats_out_MCMC_trace.pdf"), optional: true ,emit: stats_out_mcmc_trace + tuple val(meta), path("results_*/Stats_out_MCMC_iter_summ_stat.csv"), optional: true ,emit: stats_out_mcmc_iter_summ_stat + tuple val(meta), path("results_*/Stats_out_MCMC_post_pred.pdf"), optional: true ,emit: stats_out_mcmc_post_pred + tuple val(meta), path("results_*/Stats_out_MCMC_correct_prob.csv"), optional: true ,emit: stats_out_mcmc_correct_prob + tuple val(meta), path("results_*/dnacomp_genome.csv"), optional: true ,emit: dnacomp_genome + tuple val(meta), path("results_*/rescaled.bam"), optional: true ,emit: rescaled + tuple val(meta), path("results_*/5pCtoT_freq.txt"), optional: true ,emit: pctot_freq + tuple val(meta), path("results_*/3pGtoA_freq.txt"), optional: true ,emit: pgtoa_freq + tuple val(meta), path("results_*/*.fasta"), 
optional: true ,emit: fasta + tuple val(meta), path("*/"), optional: true ,emit: folder + path "versions.yml",emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + mapDamage \\ + $options.args \\ + -i $bam \\ + -r $fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(mapDamage --version)) + END_VERSIONS + """ +} diff --git a/modules/mapdamage2/meta.yml b/modules/mapdamage2/meta.yml new file mode 100644 index 00000000..e511a0a6 --- /dev/null +++ b/modules/mapdamage2/meta.yml @@ -0,0 +1,114 @@ +name: mapdamage2 + +description: Computational framework for tracking and quantifying DNA damage patterns among ancient DNA sequencing reads generated by Next-Generation Sequencing platforms. +keywords: + - ancient DNA + - DNA damage + - NGS + - damage patterns + - bam +tools: + - mapdamage2: + description: Tracking and quantifying damage patterns in ancient DNA sequences + homepage: http://ginolhac.github.io/mapDamage/ + documentation: https://ginolhac.github.io/mapDamage/ + tool_dev_url: https://github.com/ginolhac/mapDamage + doi: "10.1093/bioinformatics/btt193" + licence: ['MIT'] + +input: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.{bam}" + - fasta: + type: file + description: Fasta file, the reference the input BAM was mapped against + pattern: "*.{fasta}" + +output: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - runtime_log: + type: file + description: Log file with a summary of command lines used and timestamps. 
+ pattern: "Runtime_log.txt" + - fragmisincorporation_plot: + type: file + description: A pdf file that displays both fragmentation and misincorporation patterns. + pattern: "Fragmisincorporation_plot.pdf" + - length_plot: + type: file + description: A pdf file that displays length distribution of singleton reads per strand and cumulative frequencies of C->T at 5'-end and G->A at 3'-end are also displayed per strand. + pattern: "Length_plot.pdf" + - misincorporation: + type: file + description: Contains a table with occurrences for each type of mutations and relative positions from the reads ends. + pattern: "misincorporation.txt" + - pctot_freq: + type: file + description: Contains frequencies of Cytosine to Thymine mutations per position from the 5'-ends. + pattern: "5pCtoT_freq.txt" + - pgtoa_freq: + type: file + description: Contains frequencies of Guanine to Adenine mutations per position from the 3'-ends. + pattern: "3pGtoA_freq.txt" + - dnacomp: + type: file + description: Contains a table of the reference genome base composition per position, inside reads and adjacent regions. + pattern: "dnacomp.txt" + - lgdistribution: + type: file + description: Contains a table with read length distributions per strand. + pattern: "lgdistribution.txt" + - stats_out_mcmc_hist: + type: file + description: A MCMC histogram for the damage parameters and log likelihood. + pattern: "Stats_out_MCMC_hist.pdf" + - stats_out_mcmc_iter: + type: file + description: Values for the damage parameters and log likelihood in each MCMC iteration. + pattern: "Stats_out_MCMC_iter.csv" + - stats_out_mcmc_trace: + type: file + description: A MCMC trace plot for the damage parameters and log likelihood. + pattern: "Stats_out_MCMC_trace.pdf" + - stats_out_mcmc_iter_summ_stat: + type: file + description: Summary statistics for the damage parameters estimated posterior distributions. 
+ pattern: "Stats_out_MCMC_iter_summ_stat.csv" + - stats_out_mcmc_post_pred: + type: file + description: Empirical misincorporation frequency and posterior predictive intervals from the fitted model. + pattern: "Stats_out_MCMC_post_pred.pdf" + - stats_out_mcmc_correct_prob: + type: file + description: Position specific probability of a C->T and G->A misincorporation is due to damage. + pattern: "Stats_out_MCMC_correct_prob.csv" + - dnacomp_genome: + type: file + description: Contains the global reference genome base composition (computed by seqtk). + pattern: "dnacomp_genome.csv" + - rescaled: + type: file + description: Rescaled BAM file, where likely post-mortem damaged bases have downscaled quality scores. + pattern: "*.{bam}" + - fasta: + type: file + description: Allignments in a FASTA file, only if flagged by -d. + pattern: "*.{fasta}" + - folder: + type: folder + description: Folder created when --plot-only, --rescale and --stats-only flags are passed. + pattern: "*/" + +authors: +- "@darcy220606" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4edf5ec6..f1f5c096 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -758,6 +758,10 @@ manta/tumoronly: - modules/manta/tumoronly/** - tests/modules/manta/tumoronly/** +mapdamage2: + - modules/mapdamage2/** + - tests/modules/mapdamage2/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** diff --git a/tests/modules/mapdamage2/main.nf b/tests/modules/mapdamage2/main.nf new file mode 100644 index 00000000..a4a0eb02 --- /dev/null +++ b/tests/modules/mapdamage2/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' addParams( options: [:] ) + +workflow test_mapdamage2 { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta 
= file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + MAPDAMAGE2 ( input, fasta ) +} diff --git a/tests/modules/mapdamage2/test.yml b/tests/modules/mapdamage2/test.yml new file mode 100644 index 00000000..657f59b5 --- /dev/null +++ b/tests/modules/mapdamage2/test.yml @@ -0,0 +1,25 @@ +- name: mapdamage2 test_mapdamage2 + command: nextflow run tests/modules/mapdamage2 -entry test_mapdamage2 -c tests/config/nextflow.config + tags: + - mapdamage2 + files: + - path: output/mapdamage2/results_test.paired_end.sorted/3pGtoA_freq.txt + md5sum: 3b300b8d2842441675cb2b56740801f0 + - path: output/mapdamage2/results_test.paired_end.sorted/5pCtoT_freq.txt + md5sum: 4c27465cd02e1fb8bf6fb2b01e98446d + - path: output/mapdamage2/results_test.paired_end.sorted/Fragmisincorporation_plot.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Runtime_log.txt + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_correct_prob.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_hist.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter_summ_stat.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_post_pred.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_trace.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp.txt + md5sum: 4244d9fa554bbfeebbcea8eba3ad6466 + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp_genome.csv + md5sum: ea91a3d205717d3c6b3e0b77bb840945 + - path: output/mapdamage2/results_test.paired_end.sorted/lgdistribution.txt + md5sum: f86dfc04b1fff4337cc91add6356e3a0 + - path: output/mapdamage2/results_test.paired_end.sorted/misincorporation.txt + md5sum: 1c89b4c96d1f8996c3d0879cad5129a5 From 6bb4a6a7eefdd2c53e15eca51949c05a503523c4 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: 
Tue, 9 Nov 2021 14:03:13 +0000 Subject: [PATCH 013/101] Implement `plink/extract` module (#901) * Implement PLINK_EXTRACT module * fix plink version number * Update main.nf * Update test_data.config * Update modules/plink/extract/main.nf Co-authored-by: Harshil Patel * just use one channel * fix test with new channel input Co-authored-by: Harshil Patel --- modules/plink/extract/functions.nf | 78 ++++++++++++++++++++++++++++ modules/plink/extract/main.nf | 47 +++++++++++++++++ modules/plink/extract/meta.yml | 62 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 2 + tests/modules/plink/extract/main.nf | 29 +++++++++++ tests/modules/plink/extract/test.yml | 18 +++++++ 7 files changed, 240 insertions(+) create mode 100644 modules/plink/extract/functions.nf create mode 100644 modules/plink/extract/main.nf create mode 100644 modules/plink/extract/meta.yml create mode 100644 tests/modules/plink/extract/main.nf create mode 100644 tests/modules/plink/extract/test.yml diff --git a/modules/plink/extract/functions.nf b/modules/plink/extract/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/plink/extract/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = 
args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf new file mode 100644 index 00000000..2e18500a --- /dev/null +++ b/modules/plink/extract/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PLINK_EXTRACT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" + } else { + container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" + } + + input: + tuple val(meta), path(bed), path(bim), path(fam), path(variants) + + output: + tuple val(meta), path("*.bed"), emit: bed + tuple val(meta), path("*.bim"), emit: bim + tuple val(meta), path("*.fam"), emit: fam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + plink \\ + --bfile ${meta.id} \\ + $options.args \\ + --extract $variants \\ + --threads $task.cpus \\ + --make-bed \\ + --out $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') + END_VERSIONS + """ +} diff --git a/modules/plink/extract/meta.yml b/modules/plink/extract/meta.yml new file mode 100644 index 00000000..3978fbb4 --- /dev/null +++ b/modules/plink/extract/meta.yml @@ -0,0 +1,62 @@ +name: plink_extract +description: Subset plink bfiles with a text file of variant identifiers +keywords: + - extract + - plink +tools: + - plink: + description: Whole genome association analysis toolset, designed to perform a range of basic, large-scale analyses in a computationally efficient manner. + homepage: None + documentation: None + tool_dev_url: None + doi: "" + licence: ['GPL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + - variants: + type: file + description: A text file containing variant identifiers to keep (one per line) + pattern: "*.{keep}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + +authors: + - "@nebfield" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f1f5c096..39a1393d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -936,6 +936,10 @@ plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** +plink/extract: + - modules/plink/extract/** + - tests/modules/plink/extract/** + plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 2d30880f..4ea333cb 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -119,8 +119,10 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" + index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" } diff --git a/tests/modules/plink/extract/main.nf b/tests/modules/plink/extract/main.nf new file mode 100644 index 00000000..e031a7b7 --- /dev/null +++ b/tests/modules/plink/extract/main.nf @@ -0,0 +1,29 @@ 
+#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams ( options: [args:'--make-bed --set-missing-var-ids @:#:\\$1:\\$2']) +include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' addParams( options: [suffix:'.extract'] ) + +workflow test_plink_extract { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + PLINK_VCF ( input ) + + PLINK_VCF.out.bim + .splitText(file: 'variants.keep', keepHeader: false, by: 10) + .first() + .set { ch_variants } + + PLINK_VCF.out.bed + .concat(PLINK_VCF.out.bim, PLINK_VCF.out.fam.concat(ch_variants)) + .groupTuple() + .map{ meta, paths -> [meta, paths[0], paths[1], paths[2], paths[3]] } + .set { ch_extract } + + PLINK_EXTRACT ( ch_extract ) +} diff --git a/tests/modules/plink/extract/test.yml b/tests/modules/plink/extract/test.yml new file mode 100644 index 00000000..40569d9d --- /dev/null +++ b/tests/modules/plink/extract/test.yml @@ -0,0 +1,18 @@ +- name: plink extract test_plink_extract + command: nextflow run tests/modules/plink/extract -entry test_plink_extract -c tests/config/nextflow.config + tags: + - plink + - plink/extract + files: + - path: output/plink/test.bed + md5sum: 9121010aba9905eee965e96bc983611d + - path: output/plink/test.bim + md5sum: 510ec606219ee5daaf5c207cb01554bf + - path: output/plink/test.extract.bed + md5sum: 9e02f7143bcc756a51f20d50ca7f8032 + - path: output/plink/test.extract.bim + md5sum: 63d190aea4094aa5d042aacd63397f94 + - path: output/plink/test.extract.fam + md5sum: c499456df4da78792ef29934ef3cd47d + - path: output/plink/test.fam + md5sum: c499456df4da78792ef29934ef3cd47d From b399f22af241b6d7d4a2f7aa5616bf21b9cff2f9 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Tue, 9 Nov 2021 16:12:51 +0100 Subject: [PATCH 014/101] Add new module: cmseq/polymut (#918) * add pydamage module * remove 
TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * first cmseq commit * compressing and removing not reproducible md5sums * save intermediate work * follow symlinks while decompressing * add cmseq/polymut * add polymut * add extra test with optional input file * remove metabat2 * Update modules/cmseq/polymut/main.nf Co-authored-by: James A. 
Fellows Yates * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * fix file extension * Update modules/cmseq/polymut/meta.yml Co-authored-by: James A. Fellows Yates * add test without bam index * split tests in workflows * answer PR review * report version from variable Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel --- modules/cmseq/polymut/functions.nf | 78 ++++++++++++++++++++++++++++ modules/cmseq/polymut/main.nf | 46 ++++++++++++++++ modules/cmseq/polymut/meta.yml | 61 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/cmseq/polymut/main.nf | 38 ++++++++++++++ tests/modules/cmseq/polymut/test.yml | 26 ++++++++++ 6 files changed, 253 insertions(+) create mode 100644 modules/cmseq/polymut/functions.nf create mode 100644 modules/cmseq/polymut/main.nf create mode 100644 modules/cmseq/polymut/meta.yml create mode 100644 tests/modules/cmseq/polymut/main.nf create mode 100644 tests/modules/cmseq/polymut/test.yml diff --git a/modules/cmseq/polymut/functions.nf b/modules/cmseq/polymut/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cmseq/polymut/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = 
args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..4c061e26 --- /dev/null +++ b/modules/cmseq/polymut/main.nf @@ -0,0 +1,46 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '1.0.4' + +process CMSEQ_POLYMUT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cmseq=1.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0" + } else { + container "quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0" + } + + input: + tuple val(meta), path(bam), path(bai), path(gff), path(fasta) + + output: + tuple val(meta), path("*.txt"), emit: polymut + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def fasta_refid = fasta ? "-c $fasta" : "" + def sortindex = bai ? 
"" : "--sortindex" + """ + polymut.py \\ + $options.args \\ + $sortindex \\ + $fasta_refid \\ + --gff_file $gff \\ + $bam > ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ +} diff --git a/modules/cmseq/polymut/meta.yml b/modules/cmseq/polymut/meta.yml new file mode 100644 index 00000000..49e6b519 --- /dev/null +++ b/modules/cmseq/polymut/meta.yml @@ -0,0 +1,61 @@ +name: cmseq_polymut +description: Calculates polymorphic site rates over protein coding genes +keywords: + - polymut + - polymorphic + - mags + - assembly + - polymorphic sites + - estimation + - protein coding genes + - cmseq + - bam + - coverage +tools: + - cmseq: + description: Set of utilities on sequences and BAM files + homepage: https://github.com/SegataLab/cmseq + documentation: https://github.com/SegataLab/cmseq + tool_dev_url: https://github.com/SegataLab/cmseq + licence: ['MIT License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - gff: + type: file + description: GFF file used to extract protein-coding genes + pattern: "*.gff" + - fasta: + type: file + description: Optional fasta file to run on a subset of references in the BAM file. + pattern: .{fa,fasta,fas,fna} + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - polymut: + type: file + description: Polymut report in `.txt` format. 
+ pattern: "*.txt" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 39a1393d..2e5b55f1 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -274,6 +274,10 @@ chromap/index: - modules/chromap/index/** - tests/modules/chromap/index/** +cmseq/polymut: + - modules/cmseq/polymut/** + - tests/modules/cmseq/polymut/** + cnvkit: - modules/cnvkit/** - tests/modules/cnvkit/** diff --git a/tests/modules/cmseq/polymut/main.nf b/tests/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..729ed38f --- /dev/null +++ b/tests/modules/cmseq/polymut/main.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' addParams( options: [:] ) + +workflow test_cmseq_polymut_1 { + + input_1 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_1 ) + +} + +workflow test_cmseq_polymut_2 { + input_2 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_2 ) +} + +workflow test_cmseq_polymut_3 { + input_3 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), ] + + CMSEQ_POLYMUT( input_3 ) +} + diff --git a/tests/modules/cmseq/polymut/test.yml b/tests/modules/cmseq/polymut/test.yml new file mode 100644 index 00000000..2a989cb9 --- /dev/null +++ b/tests/modules/cmseq/polymut/test.yml @@ -0,0 +1,26 @@ +- name: cmseq polymut test_cmseq_polymut_1 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_2 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_3 + command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c tests/config/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc From 1abe23e1592f7cf6a3724589061827df0d8a0252 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 10 Nov 2021 10:30:48 +0100 Subject: [PATCH 015/101] Add CRAM to samtools/index (#1049) * feat: update samtools/index * feat: add test * fix: lint * fix: output file + md5 --- modules/samtools/index/main.nf | 11 ++++++----- modules/samtools/index/meta.yml | 5 +++++ tests/modules/samtools/index/main.nf | 13 +++++++++++-- tests/modules/samtools/index/test.yml | 9 +++++++++ 4 files changed, 31 insertions(+), 7 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index febbc11c..62254bc8 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -19,16 +19,17 @@ process SAMTOOLS_INDEX { } input: - tuple val(meta), path(bam) + tuple val(meta), path(input) output: - tuple val(meta), path("*.bai"), optional:true, emit: bai - tuple val(meta), path("*.csi"), optional:true, emit: csi - path "versions.yml" , emit: versions + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.crai"), optional:true, emit: crai + tuple val(meta), path("*.csi") , optional:true, emit: csi + path "versions.yml" , emit: versions script: """ - samtools index $options.args $bam + samtools index $options.args $input cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 988e8f53..0905b3cd 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -35,6 +35,10 @@ output: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" + - crai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" - csi: type: file description: CSI index file @@ -46,3 +50,4 @@ output: authors: - "@drpatelh" - "@ewels" + - "@maxulysse" diff --git a/tests/modules/samtools/index/main.nf b/tests/modules/samtools/index/main.nf index 
be9014e0..737936fb 100644 --- a/tests/modules/samtools/index/main.nf +++ b/tests/modules/samtools/index/main.nf @@ -2,8 +2,9 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) workflow test_samtools_index_bai { input = [ [ id:'test', single_end:false ], // meta map @@ -13,6 +14,14 @@ workflow test_samtools_index_bai { SAMTOOLS_INDEX_BAI ( input ) } +workflow test_samtools_index_crai { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true) + ] + + SAMTOOLS_INDEX_CRAI ( input ) +} + workflow test_samtools_index_csi { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 31941dd6..66ab8211 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -7,6 +7,15 @@ - path: output/samtools/test.paired_end.sorted.bam.bai md5sum: 704c10dd1326482448ca3073fdebc2f4 +- name: samtools index crai + command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config + tags: + - samtools + - samtools/index + files: + - path: 
output/samtools/test.paired_end.recalibrated.sorted.cram.crai + md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 + - name: samtools index csi command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config tags: From 7fdeed5b79517357758900d3d52e2ffe28c47102 Mon Sep 17 00:00:00 2001 From: Mei Wu Date: Wed, 10 Nov 2021 10:52:54 +0100 Subject: [PATCH 016/101] Picard/collecthsmetrics (#927) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added template * integrated module * added fasta index info * test works, have placeholder data for baits until test-data PR is merged * added new files to config * updated test files * fixing fails :sparkles: * okay final fix here on the md5sum :face_palm: * md5sum variable * update meta.yml to reflect consistency to main.nf * reverted version so conda works * Apply suggestions from code review Co-authored-by: Sébastien Guizard * md5sum can't be generated consistently for output Co-authored-by: Sébastien Guizard --- modules/picard/collecthsmetrics/functions.nf | 78 +++++++++++++++++++ modules/picard/collecthsmetrics/main.nf | 58 ++++++++++++++ modules/picard/collecthsmetrics/meta.yml | 66 ++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 3 + tests/modules/picard/collecthsmetrics/main.nf | 18 +++++ .../modules/picard/collecthsmetrics/test.yml | 8 ++ 7 files changed, 235 insertions(+) create mode 100644 modules/picard/collecthsmetrics/functions.nf create mode 100644 modules/picard/collecthsmetrics/main.nf create mode 100644 modules/picard/collecthsmetrics/meta.yml create mode 100644 tests/modules/picard/collecthsmetrics/main.nf create mode 100644 tests/modules/picard/collecthsmetrics/test.yml diff --git a/modules/picard/collecthsmetrics/functions.nf b/modules/picard/collecthsmetrics/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/picard/collecthsmetrics/functions.nf @@ -0,0 
+1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf new file mode 100644 index 00000000..1f7ad8e6 --- /dev/null +++ b/modules/picard/collecthsmetrics/main.nf @@ -0,0 +1,58 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PICARD_COLLECTHSMETRICS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::picard=2.26.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0" + } else { + container "quay.io/biocontainers/picard:2.26.2--hdfd78af_0" + } + + input: + tuple val(meta), path(bam) + path fasta + path fai + path bait_intervals + path target_intervals + + output: + tuple val(meta), path("*collecthsmetrics.txt"), emit: hs_metrics + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def reference = fasta ? 
"-R $fasta" : "" + + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard CollectHsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + picard \\ + -Xmx${avail_mem}g \\ + CollectHsMetrics \\ + $options.args \\ + $reference \\ + -BAIT_INTERVALS $bait_intervals \\ + -TARGET_INTERVALS $target_intervals \\ + -INPUT $bam \\ + -OUTPUT ${prefix}_collecthsmetrics.txt + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ +} diff --git a/modules/picard/collecthsmetrics/meta.yml b/modules/picard/collecthsmetrics/meta.yml new file mode 100644 index 00000000..4b94909f --- /dev/null +++ b/modules/picard/collecthsmetrics/meta.yml @@ -0,0 +1,66 @@ +name: picard_collecthsmetrics +description: Collects hybrid-selection (HS) metrics for a SAM or BAM file. +keywords: + - alignment + - metrics + - statistics + - insert + - hybrid-selection + - quality + - bam +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + tool_dev_url: https://github.com/broadinstitute/picard/ + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: An aligned BAM/SAM file + pattern: "*.{bam,sam}" + - fasta: + type: file + description: | + A reference file to calculate dropout metrics measuring reduced representation of reads. + Optional input. + pattern: "*.fasta" + - fai: + type: file + description: Index of FASTA file. Only needed when fasta is supplied. 
+ pattern: "*.fai" + - bait_intervals: + type: file + description: An interval list file that contains the locations of the baits used. + pattern: "baits.interval_list" + - target_intervals: + type: file + description: An interval list file that contains the locations of the targets. + pattern: "targets.interval_list" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - hs_metrics: + type: file + description: The metrics file. + pattern: "*_collecthsmetrics.txt" + +authors: + - "@projectoriented" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2e5b55f1..8dfe67cf 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -908,6 +908,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +picard/collecthsmetrics: + - modules/picard/collecthsmetrics/** + - tests/modules/picard/collecthsmetrics/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 4ea333cb..6cb494f7 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -34,6 +34,9 @@ params { contigs_genome_maf_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.maf.gz" contigs_genome_par = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.par" lastdb_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/lastdb.tar.gz" + + baits_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/baits.interval_list" + targets_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/targets.interval_list" } 'illumina' { test_single_end_bam = "${test_data_dir}/genomics/sarscov2/illumina/bam/test.single_end.bam" diff --git 
a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf new file mode 100644 index 00000000..24b031fc --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' addParams( options: [:] ) + +workflow test_picard_collecthsmetrics { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + bait_intervals = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) + target_intervals = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) + + PICARD_COLLECTHSMETRICS ( input, fasta, fai, bait_intervals, target_intervals ) +} diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml new file mode 100644 index 00000000..8c610abd --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -0,0 +1,8 @@ +- name: picard collecthsmetrics test_picard_collecthsmetrics + command: nextflow run tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c tests/config/nextflow.config + tags: + - picard + - picard/collecthsmetrics + files: + # The file can't be md5'd consistently + - path: output/picard/test_collecthsmetrics.txt From 8b4bfb12bb95930feafaf7b019c9cf82e2a1f0b2 Mon Sep 17 00:00:00 2001 From: Daniel Lundin Date: Wed, 10 Nov 2021 11:27:52 +0100 Subject: [PATCH 017/101] Add log to output from bbmap/align (#1050) --- modules/bbmap/align/main.nf | 4 +++- tests/modules/bbmap/align/test.yml | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) 
diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 733fd4d5..40810575 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -24,6 +24,7 @@ process BBMAP_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.log"), emit: log path "versions.yml" , emit: versions script: @@ -51,7 +52,8 @@ process BBMAP_ALIGN { out=${prefix}.bam \\ $options.args \\ threads=$task.cpus \\ - -Xmx${task.memory.toGiga()}g + -Xmx${task.memory.toGiga()}g \\ + &> ${prefix}.bbmap.log cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index 0fcc8ce9..a30713c9 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -6,6 +6,7 @@ files: - path: output/bbmap/test.bam md5sum: e0ec7f1eec537acf146fac1cbdd868d1 + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config @@ -15,6 +16,7 @@ files: - path: output/bbmap/test.bam md5sum: 345a72a0d58366d75dd263b107caa460 + - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config @@ -24,6 +26,7 @@ files: - path: output/bbmap/test.bam md5sum: 95f690636581ce9b27cf8568c715ae4d + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config @@ -33,3 +36,4 @@ files: - path: output/bbmap/test.bam md5sum: 441c4f196b9a82c7b224903538064308 + - path: output/bbmap/test.bbmap.log From 24707f2144aff3e966827376f37fb990fe0aa92e Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Wed, 10 Nov 2021 13:21:12 +0100 Subject: [PATCH 
018/101] Fix read indexing in AdapterRemoval module (#1051) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. 
Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * fix read index * update test.yml Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/adapterremoval/main.nf | 4 ++-- tests/modules/adapterremoval/test.yml | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index fad3963f..6d559826 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -49,7 +49,7 @@ process ADAPTERREMOVAL { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ + --file2 ${reads[1]} \\ $options.args \\ --basename $prefix \\ --threads $task.cpus \\ @@ -68,7 +68,7 @@ process ADAPTERREMOVAL { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ + --file2 ${reads[1]} \\ --collapse \\ $options.args \\ --basename $prefix \\ diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 95cd4b04..318e7866 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -13,12 +13,12 @@ tags: - adapterremoval files: - - path: output/adapterremoval/test.pair2.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 - path: output/adapterremoval/test.log - md5sum: bea86105aff4d27fe29c83e24498fefa + md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 - path: output/adapterremoval/test.pair1.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 + md5sum: 294a6277f0139bd597e57c6fa31f39c7 + - path: 
output/adapterremoval/test.pair2.trimmed.fastq.gz + md5sum: de7b38e2c881bced8671acb1ab452d78 - name: adapterremoval test_adapterremoval_paired_end_collapse command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config @@ -26,6 +26,6 @@ - adapterremoval files: - path: output/adapterremoval/test.log - md5sum: 97cb97b3d03123ac88430768b2e36c59 + md5sum: 7f0b2328152226e46101a535cce718b3 - path: output/adapterremoval/test.merged.fastq.gz - md5sum: 50a4f9fdac6a24e211eb4dcf9f292bef + md5sum: 07a8f725bfd3ecbeabdc41b32d898dee From 64006e239a5e9fedd2224b54cd93bd796785173f Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Wed, 10 Nov 2021 16:26:34 +0100 Subject: [PATCH 019/101] fix: actually do the tests for multiple files (#1058) --- tests/modules/gatk4/markduplicates/main.nf | 6 +++--- tests/modules/gatk4/markduplicates/test.yml | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/modules/gatk4/markduplicates/main.nf b/tests/modules/gatk4/markduplicates/main.nf index b9709dc0..f80c1bd5 100644 --- a/tests/modules/gatk4/markduplicates/main.nf +++ b/tests/modules/gatk4/markduplicates/main.nf @@ -14,9 +14,9 @@ workflow test_gatk4_markduplicates { workflow test_gatk4_markduplicates_multiple_bams { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - ] + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + ] ] GATK4_MARKDUPLICATES ( input ) } diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 99296ca4..66921e45 100644 --- 
a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -17,7 +17,7 @@ - gatk4 files: - path: output/gatk4/test.bai - md5sum: 93cebe29e7cca2064262b739235cca9b + md5sum: d12be29abba5865b7da0cd23f1a84e86 - path: output/gatk4/test.bam - md5sum: dcd6f584006b04141fb787001a8ecacc + md5sum: e988925ed850f8d9d966aa6689ae57de - path: output/gatk4/test.metrics From 56d5eb983463e74047acc2e1c81346715519c7fb Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Wed, 10 Nov 2021 17:40:07 +0100 Subject: [PATCH 020/101] Add Bacillus fragilis alignments to `test_data.config` (#1054) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. 
Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * add bacillus fragilis alignments Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- tests/config/test_data.config | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6cb494f7..c34696f2 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -267,6 +267,12 @@ params { test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + test1_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.bam" + test1_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam" + test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" + test2_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.bam" + test2_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam" + test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" } 'nanopore' { test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" From 4bd530135fe2a78bdfec2d710b9d294fb447c245 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Wed, 10 Nov 2021 11:25:50 -0700 Subject: [PATCH 021/101] add ngmaster module (#1024) * add ngmaster module * add docker container Co-authored-by: Gregor Sturm --- modules/ngmaster/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/ngmaster/main.nf | 41 +++++++++++++++++ modules/ngmaster/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ngmaster/main.nf | 13 ++++++ tests/modules/ngmaster/test.yml | 7 +++ 6 files changed, 186 insertions(+) create mode 100644 modules/ngmaster/functions.nf create mode 100644 modules/ngmaster/main.nf create mode 100644 modules/ngmaster/meta.yml create mode 100644 tests/modules/ngmaster/main.nf create mode 100644 tests/modules/ngmaster/test.yml diff --git a/modules/ngmaster/functions.nf b/modules/ngmaster/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ngmaster/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } 
// Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf new file mode 100644 index 00000000..1897b5f3 --- /dev/null +++ b/modules/ngmaster/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NGMASTER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda 
(params.enable_conda ? "bioconda::ngmaster=0.5.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + ngmaster \\ + $options.args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) + END_VERSIONS + """ +} diff --git a/modules/ngmaster/meta.yml b/modules/ngmaster/meta.yml new file mode 100644 index 00000000..1dbb02a0 --- /dev/null +++ b/modules/ngmaster/meta.yml @@ -0,0 +1,43 @@ +name: ngmaster +description: Serotyping Neisseria gonorrhoeae assemblies +keywords: + - fasta + - Neisseria gonorrhoeae + - serotype +tools: + - ngmaster: + description: In silico multi-antigen sequence typing for Neisseria gonorrhoeae (NG-MAST) + homepage: https://github.com/MDU-PHL/ngmaster/blob/master/README.md + documentation: https://github.com/MDU-PHL/ngmaster/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/ngmaster + doi: "10.1099/mgen.0.000076" + licence: ['GPL v3 only'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8dfe67cf..8c169fcd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -860,6 +860,10 @@ nextclade: - modules/nextclade/** - tests/modules/nextclade/** +ngmaster: + - modules/ngmaster/** + - tests/modules/ngmaster/** + optitype: - modules/optitype/** - tests/modules/optitype/** diff --git a/tests/modules/ngmaster/main.nf b/tests/modules/ngmaster/main.nf new file mode 100644 index 00000000..8bc975ed --- /dev/null +++ b/tests/modules/ngmaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NGMASTER } from '../../../modules/ngmaster/main.nf' addParams( options: [:] ) + +workflow test_ngmaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + NGMASTER ( input ) +} diff --git a/tests/modules/ngmaster/test.yml b/tests/modules/ngmaster/test.yml new file mode 100644 index 00000000..31584a54 --- /dev/null +++ b/tests/modules/ngmaster/test.yml @@ -0,0 +1,7 @@ +- name: ngmaster test_ngmaster + command: nextflow run tests/modules/ngmaster -entry test_ngmaster -c tests/config/nextflow.config + tags: + - ngmaster + files: + - path: output/ngmaster/test.tsv + md5sum: cf674474eaf8ac6abfcebce0af0226cf From 3b600af50eae8264960df817277cfe303d2acd47 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Thu, 11 Nov 2021 08:58:59 +0100 Subject: [PATCH 022/101] feat: update gatk4 from 4.2.0.0 to 4.2.3.0 (#1059) * feat: update gatk4 from 4.2.0.0 to 4.2.3.0 * update md5checksum * commit all files * actually checksum was good, but I suspect something fishy with the tests --- modules/gatk4/applybqsr/main.nf | 6 +++--- modules/gatk4/baserecalibrator/main.nf | 6 +++--- modules/gatk4/bedtointervallist/main.nf | 6 +++--- modules/gatk4/calculatecontamination/main.nf | 6 +++--- modules/gatk4/createsequencedictionary/main.nf | 6 +++--- modules/gatk4/createsomaticpanelofnormals/main.nf | 6 +++--- modules/gatk4/estimatelibrarycomplexity/main.nf | 6 +++--- modules/gatk4/fastqtosam/main.nf | 6 +++--- modules/gatk4/filtermutectcalls/main.nf | 6 +++--- modules/gatk4/genomicsdbimport/main.nf | 6 +++--- modules/gatk4/getpileupsummaries/main.nf | 6 +++--- modules/gatk4/haplotypecaller/main.nf | 6 +++--- modules/gatk4/intervallisttools/main.nf | 6 +++--- modules/gatk4/learnreadorientationmodel/main.nf | 6 +++--- modules/gatk4/markduplicates/main.nf | 6 +++--- modules/gatk4/mergebamalignment/main.nf | 6 +++--- modules/gatk4/mergevcfs/main.nf | 6 +++--- modules/gatk4/mutect2/main.nf | 6 +++--- modules/gatk4/revertsam/main.nf | 6 +++--- modules/gatk4/samtofastq/main.nf | 6 +++--- modules/gatk4/splitncigarreads/main.nf | 6 +++--- modules/gatk4/variantfiltration/main.nf | 6 +++--- tests/modules/gatk4/applybqsr/test.yml | 6 +++--- tests/modules/gatk4/calculatecontamination/test.yml | 8 ++++---- tests/modules/gatk4/createsomaticpanelofnormals/test.yml | 2 +- tests/modules/gatk4/fastqtosam/test.yml | 4 ++-- tests/modules/gatk4/filtermutectcalls/test.yml | 6 +++--- tests/modules/gatk4/getpileupsummaries/test.yml | 4 ++-- tests/modules/gatk4/markduplicates/test.yml | 6 +++--- tests/modules/gatk4/mergebamalignment/test.yml | 2 +- tests/modules/gatk4/mergevcfs/test.yml | 4 ++-- tests/modules/gatk4/revertsam/test.yml | 2 +- tests/modules/gatk4/splitncigarreads/test.yml | 2 +- 
tests/subworkflows/nf-core/gatk_create_som_pon/test.yml | 2 +- 34 files changed, 90 insertions(+), 90 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 508a29ca..e1a4d7b4 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -11,11 +11,11 @@ process GATK4_APPLYBQSR { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 85c30daf..ff9eb1f9 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -11,11 +11,11 @@ process GATK4_BASERECALIBRATOR { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 064247cc..7c06ccef 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -11,11 +11,11 @@ process GATK4_BEDTOINTERVALLIST { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index bfe9b8fd..28dd7ccf 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -11,11 +11,11 @@ process GATK4_CALCULATECONTAMINATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 12372bdf..db28e244 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -11,11 +11,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 66dfda23..b3685171 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -11,11 +11,11 @@ process GATK4_CREATESOMATICPANELOFNORMALS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index 4cea7086..bfaeedbc 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -11,11 +11,11 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.2.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.2.0--hdfd78af_1" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.2.0--hdfd78af_1" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index ebd081ac..5879618d 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -11,11 +11,11 @@ process GATK4_FASTQTOSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 5a784677..b54e07ed 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -11,11 +11,11 @@ process GATK4_FILTERMUTECTCALLS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index 78c6b81f..c5582563 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -11,11 +11,11 @@ process GATK4_GENOMICSDBIMPORT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 09449f12..7919678c 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -11,11 +11,11 @@ process GATK4_GETPILEUPSUMMARIES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 4bddbb6d..1e540d17 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -11,11 +11,11 @@ process GATK4_HAPLOTYPECALLER { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 2f464919..5da651b9 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -11,11 +11,11 @@ process GATK4_INTERVALLISTTOOLS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--hdfd78af_1" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--hdfd78af_1" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index 0a499def..b8aee764 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -11,11 +11,11 @@ process GATK4_LEARNREADORIENTATIONMODEL { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index b1ff5222..e44f4bfc 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -11,11 +11,11 @@ process GATK4_MARKDUPLICATES { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 0c9fe5ee..9c5fe26c 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -11,11 +11,11 @@ process GATK4_MERGEBAMALIGNMENT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index ce9a52c3..28073fcb 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -11,11 +11,11 @@ process GATK4_MERGEVCFS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 7999eec3..748b1673 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -11,11 +11,11 @@ process GATK4_MUTECT2 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index b3c9085a..7b5ee696 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -11,11 +11,11 @@ process GATK4_REVERTSAM { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 324f3bae..843c61ce 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -11,11 +11,11 @@ process GATK4_SAMTOFASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 793cc671..01b1d05a 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -11,11 +11,11 @@ process GATK4_SPLITNCIGARREADS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 28084645..a4e950ae 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -11,11 +11,11 @@ process GATK4_VARIANTFILTRATION { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" + container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" } input: diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index ed89c6ff..02448b02 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 87a2eabae2b7b41574f966612b5addae + md5sum: af56f5dd81b95070079d54670507f530 - name: gatk4 applybqsr test_gatk4_applybqsr_intervals command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config @@ -14,7 +14,7 @@ - gatk4 files: - path: output/gatk4/test.bam - 
md5sum: 9c015d3c1dbd9eee793b7386f432b6aa + md5sum: 0cbfa4be143e988d56ce741b5077510e - name: gatk4 applybqsr test_gatk4_applybqsr_cram command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config @@ -23,4 +23,4 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 02f84815fdbc99c21c8d42ebdcabbbf7 + md5sum: 720ef7453fc3c9def18bbe396062346c diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml index 8736bc32..89d419e0 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config @@ -14,7 +14,7 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config @@ -23,6 +23,6 @@ - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - path: output/gatk4/test.segmentation.table - md5sum: 478cb4f69ec001944b9cd0e7e4de01ef + md5sum: 91f28bfe4727a3256810927fc5eba92f diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index d3e6c537..d71059ad 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ 
b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -6,4 +6,4 @@ files: - path: output/gatk4/test.pon.vcf.gz - path: output/gatk4/test.pon.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index f6597b66..b576075a 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.bam - md5sum: 4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4/fastqtosam files: - path: output/gatk4/test.bam - md5sum: 4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index b17a306c..d5b97d36 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -8,7 +8,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config @@ -20,7 +20,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val command: 
nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config @@ -32,4 +32,4 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml index 88cca794..6c5e1f84 100644 --- a/tests/modules/gatk4/getpileupsummaries/test.yml +++ b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -5,7 +5,7 @@ - gatk4/getpileupsummaries files: - path: output/gatk4/test.pileups.table - md5sum: 00f92a8f7282d6129f1aca04e2c7d968 + md5sum: 0d19674bef2ff0700d5b02b3463dd210 - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4/getpileupsummaries files: - path: output/gatk4/test.pileups.table - md5sum: 00f92a8f7282d6129f1aca04e2c7d968 + md5sum: 0d19674bef2ff0700d5b02b3463dd210 diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 66921e45..f4345bc4 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -7,7 +7,7 @@ - path: output/gatk4/test.bai md5sum: e9c125e82553209933883b4fe2b8d7c2 - path: output/gatk4/test.bam - md5sum: bda9a7bf5057f2288ed70be3eb8a753f + md5sum: f94271007c1ec8e56adfdd8e45a07bd0 - path: output/gatk4/test.metrics - name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams @@ -17,7 +17,7 @@ - gatk4 files: - path: output/gatk4/test.bai - md5sum: d12be29abba5865b7da0cd23f1a84e86 + md5sum: bad71df9c876e72a5bc0a3e0fd755f92 - path: output/gatk4/test.bam - md5sum: e988925ed850f8d9d966aa6689ae57de + md5sum: 
e0462bd4fe2cf4beda71e1bd2c66235b - path: output/gatk4/test.metrics diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index 190a9391..4fb98e3d 100644 --- a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -5,4 +5,4 @@ - gatk4/mergebamalignment files: - path: output/gatk4/test.bam - md5sum: bd4a5e2ea916826aadebb5878333e26f + md5sum: e6f1b343700b7ccb94e81ae127433988 diff --git a/tests/modules/gatk4/mergevcfs/test.yml b/tests/modules/gatk4/mergevcfs/test.yml index 4458f969..884738b0 100644 --- a/tests/modules/gatk4/mergevcfs/test.yml +++ b/tests/modules/gatk4/mergevcfs/test.yml @@ -5,7 +5,7 @@ - gatk4 files: - path: output/gatk4/test.vcf.gz - md5sum: ff48f175e26db2d4b2957762f6d1c715 + md5sum: 5b289bda88d3a3504f2e19ee8cff177c - name: gatk4 mergevcfs test_gatk4_mergevcfs_refdict command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c tests/config/nextflow.config @@ -14,4 +14,4 @@ - gatk4 files: - path: output/gatk4/test.vcf.gz - md5sum: ff48f175e26db2d4b2957762f6d1c715 + md5sum: 5b289bda88d3a3504f2e19ee8cff177c diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index 3f0969c4..c65d3666 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -5,4 +5,4 @@ - gatk4/revertsam files: - path: output/gatk4/test.reverted.bam - md5sum: f778310b18b83b49929eb648594f96dc + md5sum: f783a88deb45c3a2c20ca12cbe1c5652 diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index d6827db9..146cd329 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -5,4 +5,4 @@ - gatk4/splitncigarreads files: - path: output/gatk4/test.bam - md5sum: 8d05a41f9467e62d3fc1bc725f0869ec + md5sum: bfe6d04a4072f97fdb97dbc502c9d3e2 diff --git 
a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index 7c9e7ac0..a4478044 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -31,7 +31,7 @@ contains: - "FORMAT= Date: Thu, 11 Nov 2021 14:19:26 +0000 Subject: [PATCH 023/101] Update module: `pbccs` (#1018) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 📦 NEW: First commit of pbccs module * 👌 IMPROVE: Remove option from command + rename output (ccs -> bam) * 👌 IMPROVE: Move .pbi output into report channel * 🐛FIX: Correct code after --rq option removal from command line module - module main.nf: Remove ramaining rq input channel - Test main.nf: Transfert rq into addParams - Test test.yml: Update md5sums * 🐛FIX: 
Repair additionnal option usage * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: CCS is run in parallel with --chunk option * 👌 IMPROVE: Add Pbindex in bam ouput channel * 👌 IMPROVE: Change label to process_low * 👌 IMPROVE: Define reports files names + add json version of txt report * 🐛 FIX: Add missing backslashes * 🐛 FIX: Add missing gz extension * 🐛 FIX: update ouput channel * 🐛 FIX: output file name * 👌 IMPROVE: .gitignore * 👌 IMPROVE: Update function.nf to last version * 👌 IMPROVE: Update saveAs in main.nf * 👌 IMPROVE: Add pbccs module * 🐛 FIX: Fix Broken test * 👌 IMPROVE: Update test_data.config * 🐛 FIX: Fix test * 👌 IMPROVE: Update path of test dataset files * 👌 IMPROVE: Remove useless index + Fix Typos * 🐛 FIX: fill contains args * 👌 IMPROVE: One output => One Channel * 👌 IMPROVE: One input => One channel * 🐛 FIX: Update tests * 🐛 FIX: Remove TODOs from test.yaml * 👌 IMPROVE: Revert and keep bam and pbi together * 🐛 FIX: Remove old rq input from meta.yml * 👌 IMPROVE: Update test to match input channels * 👌 IMPROVE: use prefix for for output file name * 👌 IMPROVE: Update to new versions.yml * 👌 IMPROVE: Update pbccs from v6.0.0 to v6.0.2 * 👌 IMPROVE: Keep track of the former sample id in meta * Update modules/pbccs/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: remove former_id from meta * 👌 IMPROVE: Use chunk number in output filename * 🐛 FIX: Update meta.yml * 🐛 FIX: Update reports filenames with chunk number. Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/pbccs/main.nf | 6 ++--- modules/pbccs/meta.yml | 22 +++++++++++++++--- ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 0 -> 3558 bytes tests/modules/pbccs/test.yml | 10 ++++---- 4 files changed, 27 insertions(+), 11 deletions(-) create mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 7e70ac14..55eacd76 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -37,9 +37,9 @@ process PBCCS { ccs \\ $bam \\ ${prefix}.chunk${chunk_num}.bam \\ - --report-file ${prefix}.report.txt \\ - --report-json ${prefix}.report.json \\ - --metrics-json ${prefix}.metrics.json.gz \\ + --report-file ${prefix}.chunk${chunk_num}.report.txt \\ + --report-json ${prefix}.chunk${chunk_num}.report.json \\ + --metrics-json ${prefix}.chunk${chunk_num}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ $options.args diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index 38f31496..f55c0d71 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -42,10 +42,26 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - css: + - bam: type: file - description: Consensus sequences - pattern: "*.ccs.bam" + description: CCS sequences in bam format + pattern: "*.bam" + - pbi: + type: file + description: PacBio Index of CCS sequences + pattern: "*.pbi" + - report_txt: + type: file + description: Summary of CCS in txt format + pattern: ".txt" + - report_json: + type: file + description: Summary of CCS in txt json + pattern: ".json" + - metrics: + type: file + description: Metrics about zmws + pattern: "*.json.gz" authors: - "@sguizard" diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..33acb8369a1bc62b5e66e1ed80e2247dd0e2759f GIT binary patch literal 3558 zcmaJ@TaVku73T0FQCdkWt=50fJ2%8xAU@;c_*j71tCg z&y04xD)|!Up)V=?+}g;h4+RSJp}(O&gXmj<0(mOXry?kto--7;v;&k9hjTgS%y%wx z=6LmbRfFfZUyVJ_*0jIjp!gUtco$v~KtnZ30msOtx zcHdT1m3~EitNki`jbLe9>(_wBq}G>Ozs~I4#_lq!Kh=Jt(Go3xsXZnACMEZ^j{Obn zsBLygYW~cVBlwmmqb!sIoX;N%I{4zG*n~!Ek~}55`k^t`5-rvaO-kk@Au+k9{qjxbpJ20feVU+IYv8?e?$%-E#Hn)VR1dVK5g8EOIwYxea|m~^}#a6)g)gcA+b zU_auIhC#G1oc)nM7&#+vk2#(rnULLsap3r&BS%b)g+qPL1~TFY-D=f==RxGq$Pv*c z2j-_-_K-ar?v6pkA zwLXeG%6Mub?#)hx>tLPKc3l+ex@ieSf{?V#nTUctmYO?3w4Is<-Z)6j0GQJXXC8GW zdm_{7d!AsK#EzAkBgR?(*yI5Bb8}lL$s#4g{*sypm_vUjwe$5qYPCkha|&Rsf(} z1YxX+8Z?^KAXQ@PtHdU}4dVg9*~N1VzDw}F53l&hz{rUq`Y>j8$)S!>L-jRH#SxjE zi6MGp@;iv!7shEV(IJLRh%n<*Equ#_SUUKeK->^bmSmY)u@01_7sgYF#V^P&v`}By zXc;3cHugwj#g<5=eORG(Yz+ym;QUaZn=n_6P1w0Amkw)lZBG-whq-!e%nARKT7_Py z(<)tht{>Nu63jJd4LIs2G}?$uK7rXXps){@6Z%*2w-!xl_&DfsB3@E@)E0VxerQLfhXuM+lZtH1)!05f zHV2=9CV(ieqeijyOc(DdX(vJ28b~Xm|0k^E=>Lrvj?kYvLa&RHNEO!sz0S`g74`xB zDb)w`^z?H>SwFLLDaQx<*AI`+HOMFb`b@Vuec(Dr&mU14LPx528K?|iQ}Un>RGHS`SvA)N*>5~AgT|{1kF4ezgEyM+w%}b! 
z+RyaIn^c3m`Q~OyT#n~$R*t4}GL^kqy_X+fOp&u~Oohc~tGK-7gpqWH(G-&3BAI4# zirKWtjs1-$6G*fS)1@5UcD82!*vc|)k?_ID7tX*7vz$Gi2GXAd3{tTtAhX<7Nwr9S z(Rr}QYCg>S(f z0T~~yAf(5ovk%!ZyHuhmyN$jtv|roUyyrgL_~ibFA8+38kZ()K?}r^SJN|WNX3Dpf5I(`sb z2IGu9iT?QM#wQ#1K7N1Jtlss6V4w|de*?CMoo;-CaROlVJ9hx+`T|+?SBs8wM^UeL z#08uI9Xi#-xs%P_L>U1KQ#%LMF;VC`KLJAdNoa6*bJka0j3$=@2G=d3%o)^$J)R1w zE>;ipV^6$+gey4;nFDtuKe(+7KjG1U39umg%ENnoAe~3-;1#uQWoq4;y$xGJcqq{j z44!lX2r{X}E;e`A*hdR>XWf@f1D^T+`ll@s4dp)E;YXKZ$KbD_&PR&RN}qp5-|k+V zwI62prV~!b+l<4p;?C@J2CNWV-eSG$%=k-au7K?`IM`eG<7z$!o&UM03mzYgs)=&d z^^UltdjBXLU1@A7C!o;5_1K`04o5_5YN<)4!|Mzp$$0FGiF6 z8dmuR&~{p>ojaWOU?N>}vuNpNn81Bq&h|MtCaoMzVR5bI_$A~i Date: Thu, 11 Nov 2021 14:57:55 +0000 Subject: [PATCH 024/101] Update: `gstama/collapse` (#1057) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Remove junk files * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: Add some pacbio test files * 🐛 FIX: Add Pacbio index to test_data.config * 👌 IMPROVE: Re add 10000 data test * 👌 IMPROVE: Add some pbindex * 🐛 FIX: Add pbi extension to files * 📦 NEW: Add galgal6 chr30 test 
data * 📦 NEW: Add gd-tama module * 🐛 FIX (TEMP): Update singularity container address * 📦 NEW: Add bamtools module * 📦 NEW: Rewrite and rename module (gstama => gstama/collapse) * 👌 IMPROVE: ignore test data * 👌 IMPROVE: Update output * 👌 IMPROVE: Add channel for publishing tama's metadata outputs * 👌 IMPROVE: Update process label * 🐛 FIX: Use depot.galxyproject.org url for singularity * 👌 IMPROVE: autoselect running mode * 🐛 FIX: correct gstama collapse bash test * 👌 IMPROVE: Update to last templates version * 👌 IMPROVE: Update tama package and label * 👌 IMPROVE: Final version of test datasets config * 👌 IMPROVE: Remove useless index + Fix Typos * 👌 IMPROVE: Update test * 👌 IMPROVE: delete unnecessary files * 👌 IMPROVE: Update + clean - Remove unnecessary files - Update to new versions.yml file - Better output channels * 👌 IMPROVE: Update meta.yml and output channels * 👌 IMPROVE: Remove useless files * 👌 IMPROVE: Remove automatic MODE setup * 👌 IMPROVE: Applied @jfy133 code modification suggestions * Update modules/gstama/collapse/meta.yml Co-authored-by: James A. Fellows Yates * 🐛 FIX: Add missing fasta option in meta.yml * 🐛 FIX: Fix typo * 🐛 FIX: Update package version * Update main.nf * Update meta.yml * Update modules/gstama/collapse/meta.yml * Apply suggestions from code review * Update tests/modules/gstama/collapse/main.nf * Update main.nf * 👌 IMPROVE: Update to gs-tama 1.0.3 * Update modules/gstama/collapse/main.nf Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates Co-authored-by: Harshil Patel --- modules/gstama/collapse/main.nf | 9 +++++---- tests/modules/gstama/collapse/test.yml | 4 +++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index d4167b5e..8fc7877f 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -11,11 +11,12 @@ process GSTAMA_COLLAPSE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) + conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0" } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" + container "quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0" + } input: @@ -23,7 +24,7 @@ process GSTAMA_COLLAPSE { path fasta output: - tuple val(meta), path("*.bed") , emit: bed + tuple val(meta), path("*_collapsed.bed") , emit: bed tuple val(meta), path("*_trans_read.bed") , emit: bed_trans_reads tuple val(meta), path("*_local_density_error.txt"), emit: local_density_error tuple val(meta), path("*_polya.txt") , emit: polya diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml index 98de6bb3..3815a156 100644 --- a/tests/modules/gstama/collapse/test.yml +++ b/tests/modules/gstama/collapse/test.yml @@ -4,7 +4,7 @@ - gstama - gstama/collapse files: - - path: output/gstama/test_tc.bed + - path: output/gstama/test_tc_collapsed.bed md5sum: e5105198ed970a33ae0ecaa7bff421d9 - path: output/gstama/test_tc_local_density_error.txt md5sum: b917ac1f14eccd590b6881a686f324d5 @@ -18,5 
+18,7 @@ md5sum: 0ca1a32f33ef05242d897d913802554b - path: output/gstama/test_tc_trans_report.txt md5sum: 33a86c15ca2acce36b2a5962f4c1adc4 + - path: output/gstama/test_tc_varcov.txt + md5sum: 587fd899ff658eb66b1770a35283bfcb - path: output/gstama/test_tc_variants.txt md5sum: 5b1165e9f33faba4f7207013fc27257e From 94851901d548ee879d94ab4f4a2c2496bab04715 Mon Sep 17 00:00:00 2001 From: alexandregilardet <63741852+alexandregilardet@users.noreply.github.com> Date: Sat, 13 Nov 2021 15:09:14 +0000 Subject: [PATCH 025/101] add new module pmdtools/filter #847 (#963) * commit but won't be used because pmdtools should have a submodule * added submodule pmdtools/filter * removed pmdtools module created before deciding to design two submodules * oops forgot to remove a TODO * removed white space meta.yml, removed v in version and manually added submodule /filter to test * Update pytest_modules.yml * Update main.nf added split_cpus for multi-tools module resources * Update test.yml added .pmd extension to match modules/ main.nf * Update test.yml update md5sum * Update singularity and docker build in main.nf From build 4 to 5 in order to match the conda one * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pmdtools/filter/main.nf Co-authored-by: James A. 
Fellows Yates * Update main.nf adding samtools version we need both pmdtools and samtools versions * Update main.nf remove .pmd extension * Update test.yml md5sum Because file extension changed Co-authored-by: James A. Fellows Yates --- modules/pmdtools/filter/functions.nf | 78 ++++++++++++++++++++++++++ modules/pmdtools/filter/main.nf | 60 ++++++++++++++++++++ modules/pmdtools/filter/meta.yml | 55 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/pmdtools/filter/main.nf | 15 +++++ tests/modules/pmdtools/filter/test.yml | 8 +++ 6 files changed, 220 insertions(+) create mode 100644 modules/pmdtools/filter/functions.nf create mode 100644 modules/pmdtools/filter/main.nf create mode 100644 modules/pmdtools/filter/meta.yml create mode 100644 tests/modules/pmdtools/filter/main.nf create mode 100644 tests/modules/pmdtools/filter/test.yml diff --git a/modules/pmdtools/filter/functions.nf b/modules/pmdtools/filter/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/pmdtools/filter/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join 
elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..3e363a9c --- /dev/null +++ b/modules/pmdtools/filter/main.nf @@ -0,0 +1,60 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PMDTOOLS_FILTER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::pmdtools=0.60" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5" + } else { + container "quay.io/biocontainers/pmdtools:0.60--hdfd78af_5" + } + + input: + tuple val(meta), path(bam), path (bai) + val(threshold) + path(reference) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def split_cpus = Math.floor(task.cpus/2) + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" 
+ //threshold and header flags activate filtering function of pmdtools + """ + samtools \\ + calmd \\ + $bam \\ + $reference \\ + $options.args \\ + -@ ${split_cpus} \\ + | pmdtools \\ + --threshold $threshold \\ + --header \\ + $options.args2 \\ + | samtools \\ + view \\ + $options.args3 \\ + -Sb \\ + - \\ + -@ ${split_cpus} \\ + -o ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + pmdtools: \$( pmdtools --version | cut -f2 -d ' ' | sed 's/v//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/pmdtools/filter/meta.yml b/modules/pmdtools/filter/meta.yml new file mode 100644 index 00000000..72abbfdc --- /dev/null +++ b/modules/pmdtools/filter/meta.yml @@ -0,0 +1,55 @@ +name: pmdtools_filter +description: pmdtools command to filter ancient DNA molecules from others +keywords: + - pmdtools + - aDNA + - filter + - damage +tools: + - pmdtools: + description: Compute postmortem damage patterns and decontaminate ancient genomes + homepage: https://github.com/pontussk/PMDtools + documentation: https://github.com/pontussk/PMDtools + tool_dev_url: https://github.com/pontussk/PMDtools + doi: "10.1073/pnas.1318934111" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - threshold: + type: value + description: Post-mortem damage score threshold + - reference: + type: file + description: FASTA file + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Filtered BAM file + pattern: "*.bam" + +authors: + - "@alexandregilardet" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8c169fcd..22a3edf5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -956,6 +956,10 @@ plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +pmdtools/filter: + - modules/pmdtools/filter/** + - tests/modules/pmdtools/filter/** + porechop: - modules/porechop/** - tests/modules/porechop/** diff --git a/tests/modules/pmdtools/filter/main.nf b/tests/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..c4832bbb --- /dev/null +++ b/tests/modules/pmdtools/filter/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' addParams( options: [:] ) + +workflow test_pmdtools_filter { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]] + threshold = 3 + reference = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + PMDTOOLS_FILTER ( input, threshold, reference ) +} diff --git a/tests/modules/pmdtools/filter/test.yml b/tests/modules/pmdtools/filter/test.yml new file mode 100644 index 00000000..9171b02e --- /dev/null +++ b/tests/modules/pmdtools/filter/test.yml @@ -0,0 +1,8 @@ +- name: pmdtools filter + command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c tests/config/nextflow.config + tags: + - pmdtools + - pmdtools/filter + files: + - path: output/pmdtools/test.bam + md5sum: 
0fa64cb87d0439d4482938a4b6990b9d From 9475960928a3ba49624b49ef2b48438a4696ed0f Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 06:26:01 -0500 Subject: [PATCH 026/101] Bwa index (#1040) * fix a bug that the prefix is not handled by index. * build the test.yml Co-authored-by: Harshil Patel --- modules/bwa/index/main.nf | 4 ++-- tests/modules/bwa/index/test.yml | 20 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index 479431ed..db1911cb 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -31,8 +31,8 @@ process BWA_INDEX { bwa \\ index \\ $options.args \\ - $fasta \\ - -p bwa/${fasta.baseName} + -p bwa/${fasta.baseName} \\ + $fasta cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index cdcb5e53..3fe8663d 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -1,16 +1,16 @@ -- name: bwa index - command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config +- name: bwa index test_bwa_index + command: nextflow run tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config tags: - bwa - bwa/index files: - - path: ./output/bwa/bwa/genome.bwt - md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/bwa/bwa/genome.amb + - path: output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/bwa/bwa/genome.sa - md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - - path: ./output/bwa/bwa/genome.pac - md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/bwa/bwa/genome.ann + - path: output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/bwa/bwa/genome.sa 
+ md5sum: ab3952cabf026b48cd3eb5bccbb636d1 From 171a2a2dbf6008cf00b0aac6b981f182aba968c7 Mon Sep 17 00:00:00 2001 From: Daniel Straub <42973691+d4straub@users.noreply.github.com> Date: Mon, 15 Nov 2021 12:48:56 +0100 Subject: [PATCH 027/101] Unicycler with long read input (#1041) * Unicycler with long read input * tests and md5sums * remove unstable md5sums * Update modules/unicycler/main.nf Co-authored-by: Harshil Patel --- modules/unicycler/main.nf | 18 +++++++++++------- modules/unicycler/meta.yml | 13 +++++++++---- tests/modules/unicycler/main.nf | 18 +++++++++++++++--- tests/modules/unicycler/test.yml | 27 +++++++++++++++++++-------- 4 files changed, 54 insertions(+), 22 deletions(-) diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 2f7c49d6..3629d730 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -19,26 +19,30 @@ process UNICYCLER { } input: - tuple val(meta), path(reads) + tuple val(meta), path(shortreads), path(longreads) output: - tuple val(meta), path('*.scaffolds.fa'), emit: scaffolds - tuple val(meta), path('*.assembly.gfa'), emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz'), emit: scaffolds + tuple val(meta), path('*.assembly.gfa.gz'), emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" + def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" + def long_reads = longreads ? 
"-l $longreads" : "" """ unicycler \\ --threads $task.cpus \\ $options.args \\ - $input_reads \\ + $short_reads \\ + $long_reads \\ --out ./ mv assembly.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa mv assembly.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa mv unicycler.log ${prefix}.unicycler.log cat <<-END_VERSIONS > versions.yml diff --git a/modules/unicycler/meta.yml b/modules/unicycler/meta.yml index e3b1aab9..b04ac882 100644 --- a/modules/unicycler/meta.yml +++ b/modules/unicycler/meta.yml @@ -19,11 +19,15 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - shortreads: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, + List of input Illumina FastQ files of size 1 and 2 for single-end and paired-end data, respectively. + - longreads: + type: file + description: | + List of input FastQ files of size 1, PacBio or Nanopore long reads. output: - meta: type: map @@ -37,11 +41,11 @@ output: - scaffolds: type: file description: Fasta file containing scaffolds - pattern: "*.{scaffolds.fa}" + pattern: "*.{scaffolds.fa.gz}" - gfa: type: file description: gfa file containing assembly - pattern: "*.{assembly.gfa}" + pattern: "*.{assembly.gfa.gz}" - log: type: file description: unicycler log file @@ -53,3 +57,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/tests/modules/unicycler/main.nf b/tests/modules/unicycler/main.nf index 993310a1..5352fc8b 100644 --- a/tests/modules/unicycler/main.nf +++ b/tests/modules/unicycler/main.nf @@ -6,7 +6,8 @@ include { UNICYCLER } from '../../../modules/unicycler/main.nf' addParams( optio workflow test_unicycler_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], 
checkIfExists: true) ], + [] ] UNICYCLER ( input ) @@ -14,8 +15,19 @@ workflow test_unicycler_single_end { workflow test_unicycler_paired_end { input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [] + ] + + UNICYCLER ( input ) +} + +workflow test_unicycler_shortreads_longreads { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true) ] ] UNICYCLER ( input ) diff --git a/tests/modules/unicycler/test.yml b/tests/modules/unicycler/test.yml index f12cc1ba..124ac3e2 100644 --- a/tests/modules/unicycler/test.yml +++ b/tests/modules/unicycler/test.yml @@ -1,21 +1,32 @@ -- name: unicycler single-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config +- name: unicycler test_unicycler_single_end + command: nextflow run tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" -- name: unicycler paired-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c 
tests/config/nextflow.config +- name: unicycler test_unicycler_paired_end + command: nextflow run tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz + - path: output/unicycler/test.unicycler.log + contains: + - "Assembly complete" + +- name: unicycler test_unicycler_shortreads_longreads + command: nextflow run tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c tests/config/nextflow.config + tags: + - unicycler + files: + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" From 661bdb645eb6dfef3dc3dd1a998bab5ce6f0706e Mon Sep 17 00:00:00 2001 From: Daniel Straub <42973691+d4straub@users.noreply.github.com> Date: Mon, 15 Nov 2021 12:53:07 +0100 Subject: [PATCH 028/101] Change Spades input (#1039) * Change spades module * correct meta map description * adjust memory handling * remove trailing whitespace * fix hmm input * Update modules/spades/main.nf Co-authored-by: Harshil Patel --- modules/spades/main.nf | 35 +++++++++++++-------- modules/spades/meta.yml | 22 ++++++++++++-- tests/modules/spades/main.nf | 33 ++++++++++++++++++-- tests/modules/spades/test.yml | 57 ++++++++++++++++++++++++++--------- 4 files changed, 115 insertions(+), 32 deletions(-) diff --git a/modules/spades/main.nf b/modules/spades/main.nf index c21066e2..836efbda 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -19,46 +19,57 @@ process SPADES { } input: - tuple val(meta), path(reads) + tuple val(meta), path(illumina), path(pacbio), path(nanopore) path hmm output: - tuple val(meta), path('*.scaffolds.fa') , optional:true, emit: scaffolds - tuple val(meta), path('*.contigs.fa') , optional:true, emit: 
contigs - tuple val(meta), path('*.transcripts.fa') , optional:true, emit: transcripts - tuple val(meta), path('*.gene_clusters.fa'), optional:true, emit: gene_clusters - tuple val(meta), path('*.assembly.gfa') , optional:true, emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz') , optional:true, emit: scaffolds + tuple val(meta), path('*.contigs.fa.gz') , optional:true, emit: contigs + tuple val(meta), path('*.transcripts.fa.gz') , optional:true, emit: transcripts + tuple val(meta), path('*.gene_clusters.fa.gz'), optional:true, emit: gene_clusters + tuple val(meta), path('*.assembly.gfa.gz') , optional:true, emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" - def custom_hmms = params.spades_hmm ? "--custom-hmms $hmm" : "" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def maxmem = task.memory.toGiga() + def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" + def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" + def nanopore_reads = nanopore ? "--nanopore $nanopore" : "" + def custom_hmms = hmm ? 
"--custom-hmms $hmm" : "" """ spades.py \\ $options.args \\ --threads $task.cpus \\ + --memory $maxmem \\ $custom_hmms \\ - $input_reads \\ + $illumina_reads \\ + $pacbio_reads \\ + $nanopore_reads \\ -o ./ mv spades.log ${prefix}.spades.log if [ -f scaffolds.fasta ]; then mv scaffolds.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa fi if [ -f contigs.fasta ]; then mv contigs.fasta ${prefix}.contigs.fa + gzip -n ${prefix}.contigs.fa fi if [ -f transcripts.fasta ]; then mv transcripts.fasta ${prefix}.transcripts.fa + gzip -n ${prefix}.transcripts.fa fi if [ -f assembly_graph_with_scaffolds.gfa ]; then mv assembly_graph_with_scaffolds.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa fi if [ -f gene_clusters.fasta ]; then mv gene_clusters.fasta ${prefix}.gene_clusters.fa + gzip -n ${prefix}.gene_clusters.fa fi cat <<-END_VERSIONS > versions.yml diff --git a/modules/spades/meta.yml b/modules/spades/meta.yml index 3d5943ae..b6878d3d 100644 --- a/modules/spades/meta.yml +++ b/modules/spades/meta.yml @@ -20,11 +20,20 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - illumina: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + List of input FastQ (Illumina or PacBio CCS reads) files + of size 1 and 2 for single-end and paired-end data, + respectively. This input data type is required. + - pacbio: + type: file + description: | + List of input PacBio CLR FastQ files of size 1. + - nanopore: + type: file + description: | + List of input FastQ files of size 1, originating from Oxford Nanopore technology. 
- hmm: type: file description: @@ -39,26 +48,32 @@ output: type: file description: | Fasta file containing scaffolds + pattern: "*.fa.gz" - contigs: type: file description: | Fasta file containing contigs + pattern: "*.fa.gz" - transcripts: type: file description: | Fasta file containing transcripts + pattern: "*.fa.gz" - gene_clusters: type: file description: | Fasta file containing gene_clusters + pattern: "*.fa.gz" - gfa: type: file description: | gfa file containing assembly + pattern: "*.gfa.gz" - log: type: file description: | Spades log file + pattern: "*.log" - versions: type: file description: File containing software versions @@ -67,3 +82,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/tests/modules/spades/main.nf b/tests/modules/spades/main.nf index a8518a0e..b09a4266 100644 --- a/tests/modules/spades/main.nf +++ b/tests/modules/spades/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SPADES } from '../../../modules/spades/main.nf' addParams( spades_hmm: false ,options: ['args': '--rnaviral'] ) +include { SPADES } from '../../../modules/spades/main.nf' addParams( options: ['args': '--rnaviral'] ) workflow test_spades_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] ] SPADES ( input, [] ) } @@ -14,7 +16,32 @@ workflow test_spades_single_end { workflow test_spades_paired_end { input = [ [ id:'test', single_end:false ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] + ] + + SPADES ( input, [] ) +} + +workflow test_spades_illumina_nanopore { + input = [ [ 
id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + SPADES ( input, [] ) +} + +// that isnt perfect, because CCS reads should rather be used with -s instead of --pacbio +workflow test_spades_illumina_pacbio { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['pacbio']['ccs_fq_gz'], checkIfExists: true) ], + [] ] SPADES ( input, [] ) diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index 35beb1a7..a400e79d 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -1,23 +1,52 @@ -- name: spades single end - command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config +- name: spades test_spades_single_end + command: nextflow run tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: a995d1d413031534180d2b3b715fa921 - - path: output/spades/test.contigs.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 - - path: output/spades/test.scaffolds.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 + - path: output/spades/test.assembly.gfa.gz + md5sum: e5eab229363a906954a07df00e2495a6 + - path: output/spades/test.contigs.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 + - path: output/spades/test.scaffolds.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 - path: output/spades/test.spades.log -- name: spades paired end - command: nextflow run ./tests/modules/spades -entry 
test_spades_paired_end -c tests/config/nextflow.config +- name: spades test_spades_paired_end + command: nextflow run tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: bb053ef4e9250829c980ca17fbdbe3e9 - - path: output/spades/test.contigs.fa - md5sum: 4476d409da70d9f7fc2aa8f25bbaf7fd + - path: output/spades/test.assembly.gfa.gz + md5sum: c8614fb69907ae832a1359a054af240f + - path: output/spades/test.contigs.fa.gz + md5sum: eab5165b3cda96c235aaa1388010cb27 - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_nanopore + command: nextflow run tests/modules/spades -entry test_spades_illumina_nanopore -c tests/config/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e438534f14e107f005efdd659adeba6a + - path: output/spades/test.contigs.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.scaffolds.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_pacbio + command: nextflow run tests/modules/spades -entry test_spades_illumina_pacbio -c tests/config/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e12aaf83d8dbfc313339b7636ba43447 + - path: output/spades/test.contigs.fa.gz + md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.scaffolds.fa.gz + md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.spades.log + - path: output/spades/warnings.log From c48244b677d597cf1dbf15bd4a3354b43b4d585d Mon Sep 17 00:00:00 2001 From: Ilya Pletenev <56674821+i-pletenev@users.noreply.github.com> Date: Mon, 15 Nov 2021 15:51:40 +0300 Subject: [PATCH 029/101] Add new module 'ataqv/ataqv' (#998) * Add new module 'ataqv/ataqv' * Update main.nf * Update main.nf 
Co-authored-by: Harshil Patel --- modules/ataqv/ataqv/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/ataqv/ataqv/main.nf | 56 +++++++++++++++++++++ modules/ataqv/ataqv/meta.yml | 66 +++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ataqv/ataqv/main.nf | 69 ++++++++++++++++++++++++++ tests/modules/ataqv/ataqv/test.yml | 51 +++++++++++++++++++ 6 files changed, 324 insertions(+) create mode 100644 modules/ataqv/ataqv/functions.nf create mode 100644 modules/ataqv/ataqv/main.nf create mode 100644 modules/ataqv/ataqv/meta.yml create mode 100644 tests/modules/ataqv/ataqv/main.nf create mode 100644 tests/modules/ataqv/ataqv/test.yml diff --git a/modules/ataqv/ataqv/functions.nf b/modules/ataqv/ataqv/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ataqv/ataqv/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..5ddade28 --- /dev/null +++ b/modules/ataqv/ataqv/main.nf @@ -0,0 +1,56 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ATAQV_ATAQV { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::ataqv=1.2.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2" + } else { + container "quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2" + } + + input: + tuple val(meta), path(bam), path(bai), path(peak_file) + val organism + path tss_file + path excl_regs_file + path autosom_ref_file + + output: + tuple val(meta), path("*.ataqv.json"), emit: json + tuple val(meta), path("*.problems") , emit: problems, optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def peak = peak_file ? "--peak-file $peak_file" : '' + def tss = tss_file ? "--tss-file $tss_file" : '' + def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' + def autosom_ref = autosom_ref_file ? "--autosomal-reference-file $autosom_ref_file" : '' + """ + ataqv \\ + $options.args \\ + $peak \\ + $tss \\ + $excl_regs \\ + $autosom_ref \\ + --metrics-file "${prefix}.ataqv.json" \\ + --threads $task.cpus \\ + --name $prefix \\ + $organism \\ + $bam + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( ataqv --version ) + END_VERSIONS + """ +} diff --git a/modules/ataqv/ataqv/meta.yml b/modules/ataqv/ataqv/meta.yml new file mode 100644 index 00000000..760bf95f --- /dev/null +++ b/modules/ataqv/ataqv/meta.yml @@ -0,0 +1,66 @@ +name: ataqv_ataqv +description: ataqv function of a corresponding ataqv tool +keywords: + - ataqv +tools: + - ataqv: + description: ataqv is a toolkit for measuring and comparing ATAC-seq results. It was written to help understand how well ATAC-seq assays have worked, and to make it easier to spot differences that might be caused by library prep or sequencing. 
+ homepage: https://github.com/ParkerLab/ataqv/blob/master/README.rst + documentation: https://github.com/ParkerLab/ataqv/blob/master/README.rst + tool_dev_url: https://github.com/ParkerLab/ataqv + doi: "https://doi.org/10.1016/j.cels.2020.02.009" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file with the same prefix as bam file. Required if tss_file input is provided. + pattern: "*.bam.bai" + - peak_file: + type: file + description: A BED file of peaks called for alignments in the BAM file + pattern: "*.bed" + - organism: + type: string + description: The subject of the experiment, which determines the list of autosomes (see "Reference Genome Configuration" section at https://github.com/ParkerLab/ataqv). + - tss_file: + type: file + description: A BED file of transcription start sites for the experiment organism. If supplied, a TSS enrichment score will be calculated according to the ENCODE data standards. This calculation requires that the BAM file of alignments be indexed. + pattern: "*.bed" + - excl_regs_file: + type: file + description: A BED file containing excluded regions. Peaks or TSS overlapping these will be ignored. + pattern: "*.bed" + - autosom_ref_file: + type: file + description: A file containing autosomal reference names, one per line. The names must match the reference names in the alignment file exactly, or the metrics based on counts of autosomal alignments will be wrong. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - json: + type: file + description: The JSON file to which metrics will be written. 
+ - problems: + type: file + description: If given, problematic reads will be logged to a file per read group, with names derived from the read group IDs, with ".problems" appended. If no read groups are found, the reads will be written to one file named after the BAM file. + pattern: "*.problems" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@i-pletenev" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 22a3edf5..de72731b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -34,6 +34,10 @@ assemblyscan: - modules/assemblyscan/** - tests/modules/assemblyscan/** +ataqv/ataqv: + - modules/ataqv/ataqv/** + - tests/modules/ataqv/ataqv/** + bamaligncleaner: - modules/bamaligncleaner/** - tests/modules/bamaligncleaner/** diff --git a/tests/modules/ataqv/ataqv/main.nf b/tests/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..2f2a62eb --- /dev/null +++ b/tests/modules/ataqv/ataqv/main.nf @@ -0,0 +1,69 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: [:] ) +include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: ['args': '--log-problematic-reads'] ) + +workflow test_ataqv_ataqv { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_problem_reads { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV_PROBLEM_READS ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_peak { + + input = [ + [ id:'test', single_end:false ], + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_tss { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, [], [] ) +} + +workflow test_ataqv_ataqv_excluded_regs { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + excl_regs_file = file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, excl_regs_file, [] ) +} diff --git a/tests/modules/ataqv/ataqv/test.yml b/tests/modules/ataqv/ataqv/test.yml new file mode 100644 index 00000000..77452f6f --- /dev/null +++ b/tests/modules/ataqv/ataqv/test.yml @@ -0,0 +1,51 @@ +- name: ataqv ataqv test_ataqv_ataqv + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_problem_reads + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/1.problems + md5sum: 
d41d8cd98f00b204e9800998ecf8427e + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_peak + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_tss + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_excluded_regs + command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c tests/config/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' From b012b349c1ef324f1e72fd474b941e2e249b8a65 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Mon, 15 Nov 2021 13:55:18 +0000 Subject: [PATCH 030/101] New module: `plink2/vcf` (#1006) * implement plink2 VCF import * fix yaml indentation Co-authored-by: Chris Cheshire --- modules/plink2/vcf/functions.nf | 78 +++++++++++++++++++++++++++++++ modules/plink2/vcf/main.nf | 43 +++++++++++++++++ modules/plink2/vcf/meta.yml | 52 +++++++++++++++++++++ tests/modules/plink2/vcf/main.nf | 13 ++++++ tests/modules/plink2/vcf/test.yml | 12 +++++ 5 files changed, 198 insertions(+) create mode 100644 modules/plink2/vcf/functions.nf create mode 100644 modules/plink2/vcf/main.nf create mode 100644 modules/plink2/vcf/meta.yml create mode 100644 tests/modules/plink2/vcf/main.nf create mode 100644 tests/modules/plink2/vcf/test.yml diff --git a/modules/plink2/vcf/functions.nf b/modules/plink2/vcf/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/plink2/vcf/functions.nf @@ -0,0 +1,78 @@ 
+// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..869a5587 --- /dev/null +++ b/modules/plink2/vcf/main.nf @@ -0,0 +1,43 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PLINK2_VCF { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1" + } else { + container "quay.io/biocontainers/plink2:2.00a2.3--h712d239_1" + } + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.pgen"), emit: pgen + tuple val(meta), path("*.psam"), emit: psam + tuple val(meta), path("*.pvar"), emit: pvar + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + plink2 \\ + $options.args \\ + --vcf $vcf \\ + --out ${prefix} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + END_VERSIONS + """ +} diff --git a/modules/plink2/vcf/meta.yml b/modules/plink2/vcf/meta.yml new file mode 100644 index 00000000..1b2f3a9b --- /dev/null +++ b/modules/plink2/vcf/meta.yml @@ -0,0 +1,52 @@ +name: plink2_vcf +description: Import variant genetic data using plink2 +keywords: + - plink2 + - import +tools: + - plink2: + description: | + Whole genome association analysis toolset, designed to perform a range + of basic, large-scale analyses in a computationally efficient manner + homepage: http://www.cog-genomics.org/plink/2.0/ + documentation: http://www.cog-genomics.org/plink/2.0/general_usage + tool_dev_url: None + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Variant calling file (vcf) + pattern: "*.{vcf}, *.{vcf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - pgen: + type: file + description: PLINK 2 binary genotype table + pattern: "*.{pgen}" + - psam: + type: file + description: PLINK 2 sample information file + pattern: "*.{psam}" + - pvar: + type: file + description: PLINK 2 variant information file + pattern: "*.{psam}" + +authors: + - "@nebfield" diff --git a/tests/modules/plink2/vcf/main.nf b/tests/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..409e7995 --- /dev/null +++ b/tests/modules/plink2/vcf/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' addParams( options: [args:'--allow-extra-chr'] ) + +workflow test_plink2_vcf { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + + PLINK2_VCF ( input ) +} diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml new file mode 100644 index 00000000..3f0cddc6 --- /dev/null +++ b/tests/modules/plink2/vcf/test.yml @@ -0,0 +1,12 @@ +- name: plink2 vcf test_plink2_vcf + command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config + tags: + - plink2/vcf + - plink2 + files: + - path: output/plink2/test.pgen + md5sum: d66d3cd4a6c9cca1a4073d7f4b277041 + - path: output/plink2/test.psam + md5sum: dc3b77d7753a7bed41734323e3549b10 + - path: output/plink2/test.pvar + md5sum: d61e53f847a6335138b584216b4e45d0 From 2c3c87a10fae962da73f2007a3041c7d581f66c6 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 15 Nov 2021 15:26:06 +0100 Subject: [PATCH 031/101] Add `leehom` module (#1052) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add leeHom module * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen * Update modules/leehom/main.nf Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: FriederikeHanssen --- modules/leehom/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/leehom/main.nf | 85 +++++++++++++++++++++++++++++++++ modules/leehom/meta.yml | 77 +++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 21 ++++---- tests/modules/leehom/main.nf | 36 ++++++++++++++ tests/modules/leehom/test.yml | 43 +++++++++++++++++ 6 files changed, 331 insertions(+), 9 deletions(-) create mode 100644 modules/leehom/functions.nf create mode 100644 modules/leehom/main.nf create mode 100644 modules/leehom/meta.yml create mode 100644 tests/modules/leehom/main.nf create mode 100644 tests/modules/leehom/test.yml diff --git a/modules/leehom/functions.nf b/modules/leehom/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/leehom/functions.nf @@ -0,0 +1,78 @@ +// +// Utility 
functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf new file mode 100644 index 00000000..e0d9ee39 --- /dev/null +++ b/modules/leehom/main.nf @@ -0,0 +1,85 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION="1.2.15" + +process LEEHOM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::leehom=1.2.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1" + } else { + container "quay.io/biocontainers/leehom:1.2.15--h29e30f7_1" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("${prefix}.bam") , optional: true, emit: bam + tuple val(meta), path("${prefix}.fq.gz") , optional: true, emit: fq_pass + tuple val(meta), path("${prefix}.fail.fq.gz") , optional: true, emit: fq_fail + tuple val(meta), path("${prefix}_r1.fq.gz") , optional: true, emit: unmerged_r1_fq_pass + tuple val(meta), path("${prefix}_r1.fail.fq.gz"), optional: true, emit: unmerged_r1_fq_fail + tuple val(meta), path("${prefix}_r2.fq.gz") , optional: true, emit: unmerged_r2_fq_pass + tuple val(meta), path("${prefix}_r2.fail.fq.gz"), optional: true, emit: unmerged_r2_fq_fail + tuple val(meta), path("*.log") , emit: log + + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + + if ( reads.toString().endsWith('.bam') ) { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -o ${prefix}.bam \\ + --log ${prefix}.log \\ + $reads + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } else if ( meta.single_end ) { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -fq1 $reads \\ + -fqo ${prefix} \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } else { + """ + leeHom \\ + $options.args \\ + -t $task.cpus \\ + -fq1 ${reads[0]} \\ + -fq2 ${reads[1]} \\ + -fqo ${prefix} \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ + } +} diff --git a/modules/leehom/meta.yml b/modules/leehom/meta.yml new file mode 100644 index 00000000..b0d6092a --- /dev/null +++ b/modules/leehom/meta.yml @@ -0,0 +1,77 @@ +name: leehom +description: Bayesian reconstruction of ancient DNA fragments +keywords: + - ancient DNA + - adapter removal + - clipping + - trimming + - merging + - collapsing + - preprocessing + - bayesian +tools: + - leehom: + description: Bayesian reconstruction of ancient DNA fragments + homepage: "https://grenaud.github.io/leeHom/" + documentation: "https://github.com/grenaud/leeHom" + tool_dev_url: "https://github.com/grenaud/leeHom" + doi: "10.1093/nar/gku699" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Unaligned BAM or one or two gzipped FASTQ file(s) + pattern: "*.{bam,fq.gz,fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file + pattern: "*.bam" + - fq_pass: + type: file + description: Trimmed and merged FASTQ + pattern: "*.fq.gz" + - fq_fail: + type: file + description: Failed trimmed and merged FASTQs + pattern: "*.fail.fq.gz" + - unmerged_r1_fq_pass: + type: file + description: Passed unmerged R1 FASTQs + pattern: "*.r1.fq.gz" + - unmerged_r1_fq_fail: + type: file + description: Failed unmerged R1 FASTQs + pattern: "*.r1.fail.fq.gz" + - unmerged_r2_fq_pass: + type: file + description: Passed unmerged R1 FASTQs + pattern: "*.r2.fq.gz" + - unmerged_r2_fq_pass: + type: file + description: Failed unmerged R1 FASTQs + pattern: "*.r2.fail.fq.gz" + - log: + type: file + description: Log file of command + pattern: "*.log" + + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index de72731b..e64e67f9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -621,14 +621,14 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** -imputeme/vcftoprs: - - modules/imputeme/vcftoprs/** - - tests/modules/imputeme/vcftoprs/** - idr: - modules/idr/** - tests/modules/idr/** +imputeme/vcftoprs: + - modules/imputeme/vcftoprs/** + - tests/modules/imputeme/vcftoprs/** + iqtree: - modules/iqtree/** - tests/modules/iqtree/** @@ -718,6 +718,10 @@ last/train: - modules/last/train/** - tests/modules/last/train/** +leehom: + - modules/leehom/** + - tests/modules/leehom/** + lima: - modules/lima/** - tests/modules/lima/** @@ -786,6 +790,10 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +meningotype: + - modules/meningotype/** + - tests/modules/meningotype/** + metabat2/jgisummarizebamcontigdepths: - modules/metabat2/jgisummarizebamcontigdepths/** - tests/modules/metabat2/jgisummarizebamcontigdepths/** @@ 
-794,11 +802,6 @@ metabat2/metabat2: - modules/metabat2/metabat2/** - tests/modules/metabat2/metabat2/** -meningotype: - - modules/meningotype/** - - tests/modules/meningotype/** - - metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** diff --git a/tests/modules/leehom/main.nf b/tests/modules/leehom/main.nf new file mode 100644 index 00000000..2fe6f12f --- /dev/null +++ b/tests/modules/leehom/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LEEHOM } from '../../../modules/leehom/main.nf' addParams( options: [:] ) +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: [args: "-f4 -b"] ) + +workflow test_leehom_bam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + fasta = [] + + SAMTOOLS_VIEW ( input, fasta ) + LEEHOM ( SAMTOOLS_VIEW.out.bam ) +} + +workflow test_leehom_se_fq { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + LEEHOM ( input ) +} + +workflow test_leehom_pe_fq { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] + + LEEHOM ( input ) +} diff --git a/tests/modules/leehom/test.yml b/tests/modules/leehom/test.yml new file mode 100644 index 00000000..8a9f083e --- /dev/null +++ b/tests/modules/leehom/test.yml @@ -0,0 +1,43 @@ +- name: leehom test_leehom_bam + command: nextflow run tests/modules/leehom -entry test_leehom_bam -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.bam + md5sum: 19a1bf95714523868791f1d4d3aaee73 + - path: output/leehom/test.log + md5sum: d1f5da273eb69f41babda510797c7671 + - path: 
output/samtools/test.bam + md5sum: 25d13b3b31b147bb3836dea9932c38dd + +- name: leehom test_leehom_se_fq + command: nextflow run tests/modules/leehom -entry test_leehom_se_fq -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: ed10c4bbf5c3082ca68823535b91e1e2 + - path: output/leehom/test.log + md5sum: 59aa280cb72dfbea05ba913cb89db143 + +- name: leehom test_leehom_pe_fq + command: nextflow run tests/modules/leehom -entry test_leehom_pe_fq -c tests/config/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: 84929b78e3f89371ecd3b4c915b9ec33 + - path: output/leehom/test.log + md5sum: 800b5a88dc0822886bfbb271029e2a4a + - path: output/leehom/test_r1.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r1.fq.gz + md5sum: e9258420fa712e8536106995a7d1d97a + - path: output/leehom/test_r2.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r2.fq.gz + md5sum: 27230bcc5eae81ec5c1701798d39c1af From 7be60774b6562b27ee07f246ba293fc4bff074c4 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 15 Nov 2021 07:35:55 -0700 Subject: [PATCH 032/101] add module for tbprofiler (#947) * add module for tbprofiler * Update test.yml * Update meta.yml Co-authored-by: Abhinav Sharma Co-authored-by: FriederikeHanssen --- modules/tbprofiler/profile/functions.nf | 78 +++++++++++++++++++++++ modules/tbprofiler/profile/main.nf | 48 ++++++++++++++ modules/tbprofiler/profile/meta.yml | 59 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/tbprofiler/profile/main.nf | 24 +++++++ tests/modules/tbprofiler/profile/test.yml | 21 ++++++ 6 files changed, 234 insertions(+) create mode 100644 modules/tbprofiler/profile/functions.nf create mode 100644 modules/tbprofiler/profile/main.nf create mode 100644 modules/tbprofiler/profile/meta.yml create mode 100644 tests/modules/tbprofiler/profile/main.nf create mode 100644 tests/modules/tbprofiler/profile/test.yml diff --git a/modules/tbprofiler/profile/functions.nf b/modules/tbprofiler/profile/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/tbprofiler/profile/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return 
options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..afd78b05 --- /dev/null +++ b/modules/tbprofiler/profile/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process TBPROFILER_PROFILE { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::tb-profiler=3.0.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0" + } else { + container "quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("bam/*.bam") , emit: bam + tuple val(meta), path("results/*.csv") , emit: csv, optional: true + tuple val(meta), path("results/*.json"), emit: json + tuple val(meta), path("results/*.txt") , emit: txt, optional: true + tuple val(meta), path("vcf/*.vcf.gz") , emit: vcf + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def input_reads = meta.single_end ? 
"--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" + """ + tb-profiler \\ + profile \\ + $options.args \\ + --prefix ${prefix} \\ + --threads $task.cpus \\ + $input_reads + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') + END_VERSIONS + """ +} diff --git a/modules/tbprofiler/profile/meta.yml b/modules/tbprofiler/profile/meta.yml new file mode 100644 index 00000000..0cac6d6b --- /dev/null +++ b/modules/tbprofiler/profile/meta.yml @@ -0,0 +1,59 @@ +name: tbprofiler_profile +description: A tool to detect resistance and lineages of M. tuberculosis genomes +keywords: + - Mycobacterium tuberculosis + - resistance + - serotype +tools: + - tbprofiler: + description: Profiling tool for Mycobacterium tuberculosis to detect drug resistance and lineage from WGS data + homepage: https://github.com/jodyphelan/TBProfiler + documentation: https://jodyphelan.gitbook.io/tb-profiler/ + tool_dev_url: https://github.com/jodyphelan/TBProfiler + doi: "10.1186/s13073-019-0650-x" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file with alignment details + pattern: "*.bam" + - csv: + type: file + description: Optional CSV formated result file of resistance and strain type + pattern: "*.csv" + - json: + type: file + description: JSON formated result file of resistance and strain type + pattern: "*.json" + - txt: + type: file + description: Optional text file of resistance and strain type + pattern: "*.txt" + - vcf: + type: file + description: VCF with variant info again refernce genomes + pattern: "*.vcf" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e64e67f9..73d3c19b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1251,6 +1251,10 @@ tabix/tabix: - modules/tabix/tabix/** - tests/modules/tabix/tabix/** +tbprofiler/profile: + - modules/tbprofiler/profile/** + - tests/modules/tbprofiler/profile/** + tiddit/cov: - modules/tiddit/cov/** - tests/modules/tiddit/cov/** diff --git a/tests/modules/tbprofiler/profile/main.nf b/tests/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..e0c6ef56 --- /dev/null +++ b/tests/modules/tbprofiler/profile/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_ILLUMINA } from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform illumina'] ) +include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_NANOPORE} from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform nanopore'] ) + +workflow test_tbprofiler_profile_illumina { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + + TBPROFILER_PROFILE_ILLUMINA ( input ) +} + + +workflow test_tbprofiler_profile_nanopore { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + + TBPROFILER_PROFILE_NANOPORE ( input ) +} diff --git a/tests/modules/tbprofiler/profile/test.yml b/tests/modules/tbprofiler/profile/test.yml new file mode 100644 index 00000000..abfb552d --- /dev/null +++ b/tests/modules/tbprofiler/profile/test.yml @@ -0,0 +1,21 @@ +- name: tbprofiler profile illumina + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c tests/config/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz + +- name: tbprofiler profile nanopore + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c tests/config/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz From b5b3ff16ce6ab062ed3a1191a4b0b68ec0eeaa12 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 15 Nov 2021 07:41:36 -0700 Subject: [PATCH 033/101] add module for fastq-scan (#935) * add module for fastq-scan * change fastq to reads * remove uncompressed support Co-authored-by: Gregor Sturm Co-authored-by: FriederikeHanssen --- modules/fastqscan/functions.nf | 78 ++++++++++++++++++++++++++++++++ modules/fastqscan/main.nf | 40 ++++++++++++++++ modules/fastqscan/meta.yml | 43 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fastqscan/main.nf | 13 ++++++ tests/modules/fastqscan/test.yml | 7 +++ 6 files changed, 185 insertions(+) create mode 100644 modules/fastqscan/functions.nf create mode 100644 modules/fastqscan/main.nf create mode 100644 modules/fastqscan/meta.yml create mode 100644 tests/modules/fastqscan/main.nf create mode 100644 tests/modules/fastqscan/test.yml diff --git a/modules/fastqscan/functions.nf b/modules/fastqscan/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fastqscan/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def 
getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf new file mode 100644 index 00000000..0106892f --- /dev/null +++ b/modules/fastqscan/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process FASTQSCAN { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::fastq-scan=0.4.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0" + } else { + container "quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0" + } + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + zcat $reads | \\ + fastq-scan \\ + $options.args > ${prefix}.json + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) + END_VERSIONS + """ +} diff --git a/modules/fastqscan/meta.yml b/modules/fastqscan/meta.yml new file mode 100644 index 00000000..99538b5a --- /dev/null +++ b/modules/fastqscan/meta.yml @@ -0,0 +1,43 @@ +name: fastqscan +description: FASTQ summary statistics in JSON format +keywords: + - fastq + - summary + - statistics +tools: + - fastqscan: + description: FASTQ summary statistics in JSON format + homepage: https://github.com/rpetit3/fastq-scan + documentation: https://github.com/rpetit3/fastq-scan + tool_dev_url: https://github.com/rpetit3/fastq-scan + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: JSON formatted file of summary statistics + pattern: "*.json" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 73d3c19b..9362b10d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -394,6 +394,10 @@ fastqc: - modules/fastqc/** - tests/modules/fastqc/** +fastqscan: + - modules/fastqscan/** + - tests/modules/fastqscan/** + fasttree: - modules/fasttree/** - tests/modules/fasttree/** diff --git a/tests/modules/fastqscan/main.nf b/tests/modules/fastqscan/main.nf new file mode 100644 index 00000000..5fd824f6 --- /dev/null +++ b/tests/modules/fastqscan/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' addParams( options: [ args: "-g 30000"] ) + +workflow test_fastqscan { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + FASTQSCAN ( input ) +} diff --git a/tests/modules/fastqscan/test.yml b/tests/modules/fastqscan/test.yml new file mode 100644 index 00000000..80bcbc47 --- /dev/null +++ b/tests/modules/fastqscan/test.yml @@ -0,0 +1,7 @@ +- name: fastqscan test_fastqscan + command: nextflow run tests/modules/fastqscan -entry test_fastqscan -c tests/config/nextflow.config + tags: + - fastqscan + files: + - path: output/fastqscan/test.json + md5sum: b9d59a36fe85e556b5a80573ea0b0266 From 51f2d9a806f5b7c64dfea8c3d5a5abfa7513fcf8 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 15 Nov 2021 07:47:41 -0700 Subject: [PATCH 034/101] Update main.nf (#938) Co-authored-by: FriederikeHanssen --- modules/spatyper/main.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index ce320bfc..34207dbf 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -31,7 +31,6 @@ process SPATYPER { def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" """ - env spaTyper \\ $options.args \\ $input_args \\ From 73a09850fb159af232a80ec10539642ba44956f6 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 08:06:02 -0700 Subject: [PATCH 035/101] Update agrvate version (#970) Co-authored-by: FriederikeHanssen --- modules/agrvate/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index c1a6748e..c45bbe06 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -11,11 +11,11 @@ process AGRVATE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::agrvate=1.0.1" : null) + conda (params.enable_conda ? 
"bioconda::agrvate=1.0.2" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0.1--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0" } else { - container "quay.io/biocontainers/agrvate:1.0.1--hdfd78af_0" + container "quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0" } input: From a6ca2b006b9eb4f1a07098966867c3c5fea42c51 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 10:18:43 -0500 Subject: [PATCH 036/101] Cooler merge (#515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add software/cooler * fix the wrong files uploaded. * create a branch for cooler/merge * remove the bin_size from metadata. * update the test_data to test-datasets * update pytest_modules.yml * update the test file from single input file to two input file. update the output file from hdf5 to bedpe. * update the version.txt to version.yml and functions.nf * change version.yml to versions * update the test file path and fix the output versions. 
* Update meta.yml Correct "version" to "versions" * Update main.nf Fix typo * Update main.nf Remove some spaces Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard --- modules/cooler/merge/functions.nf | 78 +++++++++++++++++++++++++++++ modules/cooler/merge/main.nf | 41 +++++++++++++++ modules/cooler/merge/meta.yml | 41 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 ++ tests/modules/cooler/merge/main.nf | 16 ++++++ tests/modules/cooler/merge/test.yml | 8 +++ 7 files changed, 192 insertions(+) create mode 100644 modules/cooler/merge/functions.nf create mode 100644 modules/cooler/merge/main.nf create mode 100644 modules/cooler/merge/meta.yml create mode 100644 tests/modules/cooler/merge/main.nf create mode 100644 tests/modules/cooler/merge/test.yml diff --git a/modules/cooler/merge/functions.nf b/modules/cooler/merge/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/merge/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def 
getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf new file mode 100644 index 00000000..b15439a4 --- /dev/null +++ b/modules/cooler/merge/main.nf @@ -0,0 +1,41 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_MERGE { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + cooler merge \\ + $options.args \\ + ${prefix}.cool \\ + ${cool} + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/merge/meta.yml b/modules/cooler/merge/meta.yml new file mode 100644 index 00000000..f5c0a733 --- /dev/null +++ b/modules/cooler/merge/meta.yml @@ -0,0 +1,41 @@ +name: cooler_merge +description: Merge multiple coolers with identical axes +keywords: + - merge +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Path to COOL file + pattern: "*.cool" + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9362b10d..6fcf33dd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,6 +294,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +cooler/merge: + - modules/cooler/merge/** + - tests/modules/cooler/merge/** + csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index c34696f2..6504783c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -255,6 +255,10 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } + 'cooler' { + test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" + test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" + } } 'bacteroides_fragilis'{ 'genome' { diff --git a/tests/modules/cooler/merge/main.nf b/tests/modules/cooler/merge/main.nf new file mode 100644 index 00000000..564660c5 --- /dev/null +++ b/tests/modules/cooler/merge/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' addParams( options: [publish_files:[:]] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_merge { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true)] + ] + + COOLER_MERGE ( input ).cool | COOLER_DUMP +} diff --git 
a/tests/modules/cooler/merge/test.yml b/tests/modules/cooler/merge/test.yml new file mode 100644 index 00000000..3ac388e7 --- /dev/null +++ b/tests/modules/cooler/merge/test.yml @@ -0,0 +1,8 @@ +- name: cooler merge test_cooler_merge + command: nextflow run tests/modules/cooler/merge -entry test_cooler_merge -c tests/config/nextflow.config + tags: + - cooler/merge + - cooler + files: + - path: output/cooler/test.bedpe + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 From 1a4c7cec1b9d82fdaa15897d8e9a9e9a4767444d Mon Sep 17 00:00:00 2001 From: santiagorevale Date: Mon, 15 Nov 2021 15:43:06 +0000 Subject: [PATCH 037/101] New modules added: issues #200 and #310 (#884) * New modules added: issues #200 and #310 * Update main.nf * Update meta.yml * Update tests/modules/gatk4/genotypegvcfs/main.nf * Apply suggestions from code review * Update main.nf * Updating tests for GenomicsDB input and adding the path for this test resource to test_data.config * Some minor changes on one of the test files I forgot to include Co-authored-by: Harshil Patel Co-authored-by: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> --- modules/gatk4/genotypegvcfs/functions.nf | 78 ++++++++ modules/gatk4/genotypegvcfs/main.nf | 54 ++++++ modules/gatk4/genotypegvcfs/meta.yml | 69 +++++++ modules/gatk4/indexfeaturefile/functions.nf | 78 ++++++++ modules/gatk4/indexfeaturefile/main.nf | 40 ++++ modules/gatk4/indexfeaturefile/meta.yml | 42 ++++ tests/config/pytest_modules.yml | 8 + tests/config/test_data.config | 2 + tests/modules/gatk4/genotypegvcfs/main.nf | 180 ++++++++++++++++++ tests/modules/gatk4/genotypegvcfs/test.yml | 80 ++++++++ tests/modules/gatk4/indexfeaturefile/main.nf | 45 +++++ tests/modules/gatk4/indexfeaturefile/test.yml | 39 ++++ 12 files changed, 715 insertions(+) create mode 100644 modules/gatk4/genotypegvcfs/functions.nf create mode 100644 modules/gatk4/genotypegvcfs/main.nf create mode 100644 modules/gatk4/genotypegvcfs/meta.yml create mode 100644 
modules/gatk4/indexfeaturefile/functions.nf create mode 100644 modules/gatk4/indexfeaturefile/main.nf create mode 100644 modules/gatk4/indexfeaturefile/meta.yml create mode 100644 tests/modules/gatk4/genotypegvcfs/main.nf create mode 100644 tests/modules/gatk4/genotypegvcfs/test.yml create mode 100644 tests/modules/gatk4/indexfeaturefile/main.nf create mode 100644 tests/modules/gatk4/indexfeaturefile/test.yml diff --git a/modules/gatk4/genotypegvcfs/functions.nf b/modules/gatk4/genotypegvcfs/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf new file mode 100644 index 00000000..6fbbe663 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -0,0 +1,54 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_GENOTYPEGVCFS { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(gvcf), path(gvcf_index) + path fasta + path fasta_index + path fasta_dict + path dbsnp + path dbsnp_index + path intervals_bed + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" + def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" + def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + """ + gatk \\ + GenotypeGVCFs \\ + $options.args \\ + $interval_options \\ + $dbsnp_options \\ + -R $fasta \\ + -V $gvcf_options \\ + -O ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/genotypegvcfs/meta.yml b/modules/gatk4/genotypegvcfs/meta.yml new file mode 100644 index 00000000..cd7457a7 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/meta.yml @@ -0,0 +1,69 @@ +name: gatk4_genotypegvcfs +description: | + Perform joint genotyping on one or more samples pre-called with HaplotypeCaller. 
+keywords: + - joint genotyping + - genotype + - gvcf +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672 + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gvcf: + type: tuple of files + description: | + Tuple of gVCF(.gz) file (first) and its index (second) or the path to a GenomicsDB (and empty) + pattern: ["*.{vcf,vcf.gz}", "*.{idx,tbi}"] + - fasta: + type: file + description: Reference fasta file + pattern: "*.fasta" + - fasta_index: + type: file + description: Reference fasta index file + pattern: "*.fai" + - fasta_dict: + type: file + description: Reference fasta sequence dict file + pattern: "*.dict" + - dbsnp: + type: file + description: dbSNP VCF file + pattern: "*.vcf.gz" + - dbsnp_index: + type: tuple of files + description: dbSNP VCF index file + pattern: "*.tbi" + - intervals_bed: + type: file + description: An intervals BED file + pattern: "*.bed" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf: + type: file + description: Genotyped VCF file + pattern: "*.vcf.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/modules/gatk4/indexfeaturefile/functions.nf b/modules/gatk4/indexfeaturefile/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf new file mode 100644 index 00000000..8f40a3e3 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -0,0 +1,40 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process GATK4_INDEXFEATUREFILE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" + } else { + container "quay.io/biocontainers/gatk4:4.2.0.0--0" + } + + input: + tuple val(meta), path(feature_file) + + output: + tuple val(meta), path("*.{tbi,idx}"), emit: index + path "versions.yml" , emit: versions + + script: + """ + gatk \\ + IndexFeatureFile \\ + $options.args \\ + -I $feature_file + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/indexfeaturefile/meta.yml b/modules/gatk4/indexfeaturefile/meta.yml new file mode 100644 index 00000000..eebe6b85 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/meta.yml @@ -0,0 +1,42 @@ +name: gatk4_indexfeaturefile +description: Creates an index for a feature file, e.g. VCF or BED file. +keywords: + - index + - feature +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - feature_file: + type: file + description: VCF/BED file + pattern: "*.{vcf,vcf.gz,bed,bed.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - index: + type: file + description: Index for VCF/BED file + pattern: "*.{tbi,idx}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6fcf33dd..34085bcd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,6 +474,10 @@ gatk4/genomicsdbimport: - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** +gatk4/genotypegvcfs: + - modules/gatk4/genotypegvcfs/** + - tests/modules/gatk4/genotypegvcfs/** + gatk4/getpileupsummaries: - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -482,6 +486,10 @@ gatk4/haplotypecaller: - modules/gatk4/haplotypecaller/** - tests/modules/gatk4/haplotypecaller/** +gatk4/indexfeaturefile: + - modules/gatk4/indexfeaturefile/** + - tests/modules/gatk4/indexfeaturefile/** + gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6504783c..7538046d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -191,6 +191,8 @@ params { test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" test_test2_paired_mutect2_calls_vcf_gz_stats = 
"${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf new file mode 100644 index 00000000..0b555180 --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -0,0 +1,180 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' addParams( options: [suffix:'.genotyped'] ) +include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) + +// Basic parameters with uncompressed VCF input +workflow test_gatk4_genotypegvcfs_vcf_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters with compressed VCF input +workflow test_gatk4_genotypegvcfs_gz_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( 
input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters + optional dbSNP +workflow test_gatk4_genotypegvcfs_gz_input_dbsnp { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, [] ) +} + +// Basic parameters + optional intervals +workflow test_gatk4_genotypegvcfs_gz_input_intervals { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], intervalsBed ) +} + +// Basic parameters + optional dbSNP + optional intervals +workflow test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals { + + input = [ [ id:'test' ], // meta map + 
file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, intervalsBed ) +} + +// Basic parameters with GenomicsDB input +workflow test_gatk4_genotypegvcfs_gendb_input { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], [] ) +} + +// Basic parameters with GenomicsDB + optional dbSNP +workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, [] ) +} + +// Basic parameters with GenomicsDB + optional intervals +workflow test_gatk4_genotypegvcfs_gendb_input_intervals { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [], intervalsBed ) +} + +// Basic parameters with GenomicsDB + optional dbSNP + optional intervals +workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + 
fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + intervalsBed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + + UNTAR ( test_genomicsdb ) + + Channel.of(file("mock_gvcf_index.txt")).set{mock_gvcf_index} + Channel + .of([ id:'test' ]) + .combine(UNTAR.out.untar) + .combine(mock_gvcf_index) + .set{ input } + + GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex, intervalsBed ) +} diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml new file mode 100644 index 00000000..ad39a48d --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -0,0 +1,80 @@ +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_vcf_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: 
['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: 
['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] + +- name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals + command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/genotypegvcfs + files: + - path: output/gatk4/test.genotyped.vcf.gz + contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] diff --git a/tests/modules/gatk4/indexfeaturefile/main.nf b/tests/modules/gatk4/indexfeaturefile/main.nf new file mode 100644 index 00000000..e523606a --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/main.nf @@ -0,0 +1,45 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_INDEXFEATUREFILE } from 
'../../../../modules/gatk4/indexfeaturefile/main.nf' addParams( options: [:] ) + +workflow test_gatk4_indexfeaturefile_bed { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_bed_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_vcf { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} + +workflow test_gatk4_indexfeaturefile_vcf_gz { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) + ] + + GATK4_INDEXFEATUREFILE ( input ) +} diff --git a/tests/modules/gatk4/indexfeaturefile/test.yml b/tests/modules/gatk4/indexfeaturefile/test.yml new file mode 100644 index 00000000..5883695a --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/test.yml @@ -0,0 +1,39 @@ +# We can't use an md5sum or check file contents because: +# a) the path to the file is embedded inside it, +# b) the file is binary so we can't check for text inside it. 
+- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/genome.bed.idx + +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed_gz + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/genome.bed.gz.tbi + md5sum: 2eb6ed0a0b049efe4caa1413089dcd74 + +# We can't use an md5sum or check file contents because: +# a) the path to the file is embedded inside it, +# b) the file is binary so we can't check for text inside it. +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/test.genome.vcf.idx + +- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf_gz + command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/indexfeaturefile + files: + - path: output/gatk4/test.genome.vcf.gz.tbi + md5sum: ea03cd1d1f178eefa656787537053c37 From 0754d49db847f6e7bcba25c11fdcfe9f071055af Mon Sep 17 00:00:00 2001 From: Michael J Cipriano <42848032+mjcipriano@users.noreply.github.com> Date: Mon, 15 Nov 2021 10:53:41 -0500 Subject: [PATCH 038/101] Initial commit mummer module (#940) --- modules/mummer/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/mummer/main.nf | 55 +++++++++++++++++++++++ modules/mummer/meta.yml | 48 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/mummer/main.nf | 14 ++++++ tests/modules/mummer/test.yml | 7 +++ 6 files changed, 206 
insertions(+) create mode 100644 modules/mummer/functions.nf create mode 100644 modules/mummer/main.nf create mode 100644 modules/mummer/meta.yml create mode 100644 tests/modules/mummer/main.nf create mode 100644 tests/modules/mummer/test.yml diff --git a/modules/mummer/functions.nf b/modules/mummer/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/mummer/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) 
{ + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf new file mode 100644 index 00000000..e46fd799 --- /dev/null +++ b/modules/mummer/main.nf @@ -0,0 +1,55 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '3.23' + +process MUMMER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::mummer=3.23" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" + } else { + container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" + } + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + + def is_compressed_query = query.getName().endsWith(".gz") ? true : false + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + mummer \\ + $options.args \\ + $fasta_name_ref \\ + $fasta_name_query \\ + > ${prefix}.coords + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo $VERSION ) + END_VERSIONS + """ +} diff --git a/modules/mummer/meta.yml b/modules/mummer/meta.yml new file mode 100644 index 00000000..5f7a983c --- /dev/null +++ b/modules/mummer/meta.yml @@ -0,0 +1,48 @@ +name: mummer +description: MUMmer is a system for rapidly aligning entire genomes +keywords: + - align + - genome + - fasta +tools: + - mummer: + description: MUMmer is a system for rapidly aligning entire genomes + homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: https://doi.org/10.1186/gb-2004-5-2-r12 + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - coords: + type: file + description: File containing coordinates of matches between reference and query sequence + pattern: "*.coords" + +authors: + - "@mjcipriano" + - "@sateeshperi" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 34085bcd..d459d330 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -867,6 +867,10 @@ multiqc: - modules/multiqc/** - tests/modules/multiqc/** +mummer: + - modules/mummer/** + - tests/modules/mummer/** + muscle: - modules/muscle/** - tests/modules/muscle/** diff --git a/tests/modules/mummer/main.nf b/tests/modules/mummer/main.nf new file mode 100644 index 00000000..b24f8b16 --- /dev/null +++ b/tests/modules/mummer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MUMMER } from '../../../modules/mummer/main.nf' addParams( options: [:] ) + +workflow test_mummer { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + MUMMER ( input ) +} diff --git a/tests/modules/mummer/test.yml b/tests/modules/mummer/test.yml new file mode 100644 index 00000000..1d368d14 --- /dev/null +++ b/tests/modules/mummer/test.yml @@ -0,0 +1,7 @@ +- name: mummer test_mummer + command: nextflow run tests/modules/mummer -entry test_mummer -c 
tests/config/nextflow.config + tags: + - mummer + files: + - path: output/mummer/test.coords + md5sum: 6084fe43c7cb2eca8b96d674560bdefc From 4a9bfec61dca9e99b884a2577e273056ff92230c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Mon, 15 Nov 2021 15:57:58 +0000 Subject: [PATCH 039/101] New module: `Ultra` (#871) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Update .gitignore * 📦 Add ultra module * 👌 IMPROVE: Update test input * 👌 IMPROVE: Update and clean code - Update to last versions.yml file - Update meta.yml - Correct typos * 👌 IMPROVE: Update output channels + Rename following subtool * 👌 IMPROVE: Remove old ultre files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 👌 IMPROVE: Remove old ultra files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 🐛 Fix: add unsaved modifications * 🐛 FIX: Remove one inconstant md5sum * 🐛 FIX: Grab software name using ${getSoftwareName(task.process)} * 🐛 FIX: Remove md5sums for pickle files (not constant). * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: update output directory, update meta.yml * 👌 IMPROVE: Use modules to gunzip and sort gtf * 🐛 FIX: Set up channel correctly * 👌 IMPROVE: Remove pickles files and databases Those data might be useful in a debugging purpose. 
* Apply suggestions from code review * Update main.nf Co-authored-by: Harshil Patel --- modules/ultra/pipeline/functions.nf | 78 +++++++++++++++++++++++++++ modules/ultra/pipeline/main.nf | 48 +++++++++++++++++ modules/ultra/pipeline/meta.yml | 50 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ultra/pipeline/main.nf | 25 +++++++++ tests/modules/ultra/pipeline/test.yml | 12 +++++ 6 files changed, 217 insertions(+) create mode 100644 modules/ultra/pipeline/functions.nf create mode 100644 modules/ultra/pipeline/main.nf create mode 100644 modules/ultra/pipeline/meta.yml create mode 100644 tests/modules/ultra/pipeline/main.nf create mode 100644 tests/modules/ultra/pipeline/test.yml diff --git a/modules/ultra/pipeline/functions.nf b/modules/ultra/pipeline/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ultra/pipeline/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove 
empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..5a5c2c3e --- /dev/null +++ b/modules/ultra/pipeline/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ULTRA_PIPELINE { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + 
conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + } else { + container "quay.io/biocontainers/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + } + + input: + tuple val(meta), path(reads) + path genome + path gtf + + output: + tuple val(meta), path("*.sam"), emit: sam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + uLTRA \\ + pipeline \\ + --t $task.cpus \\ + --prefix $prefix \\ + $options.args \\ + \$(pwd)/$genome \\ + \$(pwd)/$gtf \\ + \$(pwd)/$reads \\ + ./ + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( uLTRA --version|sed 's/uLTRA //g' ) + END_VERSIONS + """ +} diff --git a/modules/ultra/pipeline/meta.yml b/modules/ultra/pipeline/meta.yml new file mode 100644 index 00000000..d0008cfc --- /dev/null +++ b/modules/ultra/pipeline/meta.yml @@ -0,0 +1,50 @@ +name: ultra_pipeline +description: uLTRA aligner - A wrapper around minimap2 to improve small exon detection +keywords: + - uLTRA + - minimap2 +tools: + - ultra: + description: Splice aligner of long transcriptomic reads to genome. + homepage: https://github.com/ksahlin/uLTRA + documentation: https://github.com/ksahlin/uLTRA + tool_dev_url: https://github.com/ksahlin/uLTRA + doi: "10.1093/bioinformatics/btab540" + licence: ['GNU GPLV3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - genome: + type: file + description: fasta file of reference genome + pattern: "*.fasta" + - gtf: + type: file + description: A annotation of use the genome + pattern: "*.gtf" + - reads: + type: file + description: A fasta or fastq file of reads to align + pattern: "*.{fasta,fastq}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - sam: + type: file + description: The aligned reads in sam format + pattern: "*.sam" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d459d330..794c7f4d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1311,6 +1311,10 @@ ucsc/wigtobigwig: - modules/ucsc/wigtobigwig/** - tests/modules/ucsc/wigtobigwig/** +ultra/pipeline: + - modules/ultra/pipeline/** + - tests/modules/ultra/pipeline/** + unicycler: - modules/unicycler/** - tests/modules/unicycler/** diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..881fe9a7 --- /dev/null +++ b/tests/modules/ultra/pipeline/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' addParams( options: [:] ) +include { GUNZIP } from '../../../../modules/gunzip/main.nf' addParams( options: [:] ) +include { GFFREAD } from '../../../../modules/gffread/main.nf' addParams( options: [args: "--sort-alpha --keep-genes -T", suffix: "_sorted"] ) + +workflow test_ultra_pipeline { + + fastq = file(params.test_data['homo_sapiens']['pacbio']['hifi'] , checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'] , checkIfExists: true) + genome = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + GUNZIP ( fastq ) + GFFREAD ( gtf ) + + GUNZIP + .out + .gunzip + .map { [ [ id:'test', single_end:false ], it ] } + .set { input } + + ULTRA_PIPELINE ( input, genome, GFFREAD.out.gtf ) +} diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml new file mode 100644 index 00000000..fa378e58 --- /dev/null +++ b/tests/modules/ultra/pipeline/test.yml @@ -0,0 +1,12 @@ +- name: ultra pipeline test_ultra_pipeline + command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config + tags: + - ultra/pipeline + - ultra + files: + - path: output/gffread/genome_sorted.gtf + md5sum: c0b034860c679a354cd093109ed90437 + - path: output/gunzip/test_hifi.fastq + md5sum: 20e41c569d5828c1e87337e13a5185d3 + - path: output/ultra/test.sam + md5sum: a37a1f9594a3099522dc1f6a903b2b12 From f93c2f2604e98e44e6fd5110751f80cbcfb2d610 Mon Sep 17 00:00:00 2001 From: mjakobs <25904555+mjakobs@users.noreply.github.com> Date: Mon, 15 Nov 2021 16:01:46 +0000 Subject: [PATCH 040/101] add Krona_db module (#995) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add Krona_db module * removed md5 sum * Update tests/modules/kronatools/kronadb/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * added input information * removed trailing white spaces * changed krona version to 2.7.1 * Apply suggestions from code review * Update modules/kronatools/kronadb/meta.yml * Update modules/kronatools/kronadb/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> --- modules/kronatools/kronadb/functions.nf | 78 +++++++++++++++++++++++ modules/kronatools/kronadb/main.nf | 35 ++++++++++ modules/kronatools/kronadb/meta.yml | 
30 +++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/kronatools/kronadb/main.nf | 9 +++ tests/modules/kronatools/kronadb/test.yml | 7 ++ 6 files changed, 163 insertions(+) create mode 100644 modules/kronatools/kronadb/functions.nf create mode 100644 modules/kronatools/kronadb/main.nf create mode 100644 modules/kronatools/kronadb/meta.yml create mode 100644 tests/modules/kronatools/kronadb/main.nf create mode 100644 tests/modules/kronatools/kronadb/test.yml diff --git a/modules/kronatools/kronadb/functions.nf b/modules/kronatools/kronadb/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/kronatools/kronadb/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map 
args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/kronatools/kronadb/main.nf b/modules/kronatools/kronadb/main.nf new file mode 100644 index 00000000..7dee12d0 --- /dev/null +++ b/modules/kronatools/kronadb/main.nf @@ -0,0 +1,35 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KRONATOOLS_KRONADB { + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::krona=2.7.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5" + } else { + container "quay.io/biocontainers/krona:2.7.1--pl526_5" + } + input: + + output: + path 'taxonomy/taxonomy.tab', emit: db + path "versions.yml" , emit: versions + + script: + def VERSION='2.7.1' + """ + ktUpdateTaxonomy.sh taxonomy + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/kronadb/meta.yml b/modules/kronatools/kronadb/meta.yml new file mode 100644 index 00000000..5a637949 --- /dev/null +++ b/modules/kronatools/kronadb/meta.yml @@ -0,0 +1,30 @@ +name: kronatools_kronadb +description: KronaTools Update Taxonomy downloads a taxonomy database +keywords: + - database + - taxonomy + - krona +tools: + - kronatools: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: https://github.com/marbl/Krona/wiki/Installing + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - none: There is no input. This module downloads a pre-built taxonomy database for use with Krona Tools. + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - db: + type: file + description: A TAB separated file that contains a taxonomy database. 
+ pattern: "*.{tab}" + +authors: + - "@mjakobs" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 794c7f4d..fb239baf 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -702,6 +702,10 @@ kraken2/kraken2: - modules/untar/** - tests/modules/kraken2/kraken2/** +kronatools/kronadb: + - modules/kronatools/kronadb/** + - tests/modules/kronatools/kronadb/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** diff --git a/tests/modules/kronatools/kronadb/main.nf b/tests/modules/kronatools/kronadb/main.nf new file mode 100644 index 00000000..90b6e30c --- /dev/null +++ b/tests/modules/kronatools/kronadb/main.nf @@ -0,0 +1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONATOOLS_KRONADB } from '../../../../modules/kronatools/kronadb/main.nf' addParams( options: [:] ) + +workflow test_kronatools_kronadb { + KRONATOOLS_KRONADB ( ) +} diff --git a/tests/modules/kronatools/kronadb/test.yml b/tests/modules/kronatools/kronadb/test.yml new file mode 100644 index 00000000..3f346a9d --- /dev/null +++ b/tests/modules/kronatools/kronadb/test.yml @@ -0,0 +1,7 @@ +- name: kronatools kronadb test_kronatools_kronadb + command: nextflow run tests/modules/kronatools/kronadb -entry test_kronatools_kronadb -c tests/config/nextflow.config + tags: + - kronatools + - kronatools/kronadb + files: + - path: output/kronatools/taxonomy/taxonomy.tab From 7ad42eae1b24e95f426a03fe544f0fcfe2912cf2 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 11:18:02 -0500 Subject: [PATCH 041/101] Cooler zoomify (#514) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add software/cooler * fix the wrong files uploaded. * create a branch for cooler/zoomify * Apply suggestions from code review * update functions.nf to new version. * update the test file to test-datasets. * update the test method of zoomify * update dump test file. 
* update version.txt to version.yml * Update modules/cooler/dump/main.nf Co-authored-by: Harshil Patel * fix the output bug of versions update to pytest_modules.yml * update the test file path and fix the output versions. * Update modules/cooler/dump/main.nf * indent Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard Co-authored-by: FriederikeHanssen --- modules/cooler/dump/main.nf | 6 ++- modules/cooler/zoomify/functions.nf | 78 +++++++++++++++++++++++++++ modules/cooler/zoomify/main.nf | 42 +++++++++++++++ modules/cooler/zoomify/meta.yml | 41 ++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + tests/modules/cooler/dump/main.nf | 2 +- tests/modules/cooler/zoomify/main.nf | 14 +++++ tests/modules/cooler/zoomify/test.yml | 8 +++ 9 files changed, 193 insertions(+), 3 deletions(-) create mode 100644 modules/cooler/zoomify/functions.nf create mode 100644 modules/cooler/zoomify/main.nf create mode 100644 modules/cooler/zoomify/meta.yml create mode 100644 tests/modules/cooler/zoomify/main.nf create mode 100644 tests/modules/cooler/zoomify/test.yml diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 2028f5f0..1ca11c7d 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -20,18 +20,20 @@ process COOLER_DUMP { input: tuple val(meta), path(cool) + val resolution output: tuple val(meta), path("*.bedpe"), emit: bedpe - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def suffix = resolution ? 
"::$resolution" : "" """ cooler dump \\ $options.args \\ -o ${prefix}.bedpe \\ - $cool + $cool$suffix cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/modules/cooler/zoomify/functions.nf b/modules/cooler/zoomify/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/zoomify/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if 
(ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..3f1ed4e7 --- /dev/null +++ b/modules/cooler/zoomify/main.nf @@ -0,0 +1,42 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_ZOOMIFY { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.mcool"), emit: mcool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + cooler zoomify \\ + $options.args \\ + -n $task.cpus \\ + -o ${prefix}.mcool \\ + $cool + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/zoomify/meta.yml b/modules/cooler/zoomify/meta.yml new file mode 100644 index 00000000..74bdbf44 --- /dev/null +++ b/modules/cooler/zoomify/meta.yml @@ -0,0 +1,41 @@ +name: cooler_zoomify +description: Generate a multi-resolution cooler file by coarsening +keywords: + - mcool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - mcool: + type: file + description: Output mcool file + pattern: "*.mcool" + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index fb239baf..86e4fe22 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,6 +294,10 @@ cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** +cooler/zoomify: + - modules/cooler/zoomify/** + - tests/modules/cooler/zoomify/** + cooler/merge: - modules/cooler/merge/** - tests/modules/cooler/merge/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 7538046d..46232ef9 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -260,6 +260,7 @@ params { 'cooler' { test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" + } } 'bacteroides_fragilis'{ diff --git a/tests/modules/cooler/dump/main.nf b/tests/modules/cooler/dump/main.nf index e2a647c5..deeeb21f 100644 --- a/tests/modules/cooler/dump/main.nf +++ b/tests/modules/cooler/dump/main.nf @@ -9,5 +9,5 @@ workflow test_cooler_dump { input = [ [ id:'test' ], // meta map file("https://raw.githubusercontent.com/open2c/cooler/master/tests/data/toy.asymm.16.cool", checkIfExists: true) ] - COOLER_DUMP ( input ) + COOLER_DUMP ( input, [:] ) } diff --git a/tests/modules/cooler/zoomify/main.nf b/tests/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..72c33983 --- /dev/null +++ b/tests/modules/cooler/zoomify/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' addParams( options: ['args':'-r 2,4,8', publish_files:[:]] ) +include { COOLER_DUMP 
} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_zoomify { + input = [ [ id:'test' ], // meta map + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true)] + + COOLER_ZOOMIFY ( input ) + COOLER_DUMP(COOLER_ZOOMIFY.out.mcool, "/resolutions/2") +} diff --git a/tests/modules/cooler/zoomify/test.yml b/tests/modules/cooler/zoomify/test.yml new file mode 100644 index 00000000..79a5af2c --- /dev/null +++ b/tests/modules/cooler/zoomify/test.yml @@ -0,0 +1,8 @@ +- name: cooler zoomify test_cooler_zoomify + command: nextflow run tests/modules/cooler/zoomify -entry test_cooler_zoomify -c tests/config/nextflow.config + tags: + - cooler + - cooler/zoomify + files: + - path: output/cooler/test.bedpe + md5sum: 8d792beb609fff62b536c326661f9507 From eff515891de1673eb54c540b4966c479b28a7e7b Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:42:48 +0100 Subject: [PATCH 042/101] new module fargene (#1068) * new module fargene * Update main.nf * Update modules/fargene/main.nf * Update main.nf Co-authored-by: Harshil Patel --- modules/fargene/functions.nf | 78 ++++++++++++++++++++++++ modules/fargene/main.nf | 63 ++++++++++++++++++++ modules/fargene/meta.yml | 101 ++++++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/fargene/main.nf | 14 +++++ tests/modules/fargene/test.yml | 12 ++++ 6 files changed, 272 insertions(+) create mode 100644 modules/fargene/functions.nf create mode 100644 modules/fargene/main.nf create mode 100644 modules/fargene/meta.yml create mode 100644 tests/modules/fargene/main.nf create mode 100644 tests/modules/fargene/test.yml diff --git a/modules/fargene/functions.nf b/modules/fargene/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/fargene/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of 
software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf new file mode 100644 index 00000000..f2afe4be --- /dev/null +++ b/modules/fargene/main.nf @@ -0,0 +1,63 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +def VERSION = '0.1' + +process FARGENE { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::fargene=0.1" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4" + } else { + container "quay.io/biocontainers/fargene:0.1--py27h21c881e_4" + } + + input: + // input may be fasta (for genomes or longer contigs) or paired-end fastq (for metagenome), the latter in addition with --meta flag + tuple val(meta), path(input) + val hmm_model + + output: + path "*.log" , emit: log + path "${prefix}/results_summary.txt" , emit: txt + tuple val(meta), path("${prefix}/hmmsearchresults/*.out") , optional: true, emit: hmm + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs.fasta") , optional: true, emit: orfs + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs-amino.fasta") , optional: true, emit: orfs_amino + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs.fasta") , optional: true, emit: contigs + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs-peptides.fasta") , optional: true, emit: contigs_pept + tuple val(meta), path("${prefix}/predictedGenes/*filtered.fasta") , optional: true, emit: filtered + tuple val(meta), path("${prefix}/predictedGenes/*filtered-peptides.fasta") , optional: true, emit: filtered_pept + tuple val(meta), path("${prefix}/retrievedFragments/all_retrieved_*.fastq") , optional: true, emit: fragments + tuple val(meta), path("${prefix}/retrievedFragments/retrievedFragments/trimmedReads/*.fasta"), optional: true, emit: trimmed + tuple val(meta), path("${prefix}/spades_assembly/*") , optional: true, emit: spades + tuple val(meta), path("${prefix}/tmpdir/*.fasta") , optional: true, emit: metagenome + tuple val(meta), path("${prefix}/tmpdir/*.out") , optional: true, emit: tmp + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + gzip \\ + -cdf $input \\ + > unziped.fa | + fargene \\ + $options.args \\ + -p $task.cpus \\ + -i unziped.fa \\ + --hmm-model $hmm_model \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo $VERSION) + END_VERSIONS + """ +} diff --git a/modules/fargene/meta.yml b/modules/fargene/meta.yml new file mode 100644 index 00000000..98ec12bb --- /dev/null +++ b/modules/fargene/meta.yml @@ -0,0 +1,101 @@ +name: fargene +description: tool that takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antibiotic resistance genes as output. +keywords: + - antibiotic resistance genes + - ARGs + - identifier + - metagenomic + - contigs +tools: + - fargene: + description: Fragmented Antibiotic Resistance Gene Identifier takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antibiotic resistance genes as output + homepage: https://github.com/fannyhb/fargene + documentation: https://github.com/fannyhb/fargene + tool_dev_url: https://github.com/fannyhb/fargene + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: fasta or paired-end fastq file containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) + pattern: "*.{fasta}" + - hmm_model: + type: string + description: name of custom hidden markov model to be used [pre-defined class_a, class_b_1_2, class_b_3, class_c, class_d_1, class_d_2, qnr, tet_efflux, tet_rpg, tet_enzyme] + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: log file + pattern: "*.{log}" + - txt: + type: file + description: analysis summary text file + pattern: "*.{txt}" + - hmm: + type: file + description: output from hmmsearch + pattern: "*.{out}" + - orfs: + type: file + description: open reading frames (ORFs) + pattern: "*.{fasta}" + - orfs_amino: + type: file + description: protein translation of open reading frames (ORFs) + pattern: "*.{fasta}" + - contigs: + type: file + description: (complete) contigs that passed the final full-length classification + pattern: "*.{fasta}" + - contigs_pept: + type: file + description: parts of the contigs that passed the final classification step that aligned with the HMM, as amino acid sequences + pattern: "*.{fasta}" + - filtered: + type: file + description: sequences that passed the final classification step, but only the parts that where predicted by the HMM to be part of the gene + pattern: "*.{fasta}" + - filtered_pept: + type: file + description: sequences from filtered.fasta, translated in the same frame as the gene is predicted to be located + pattern: "*.{fasta}" + - fragments: + type: file + description: All quality controlled retrieved fragments that were classified as positive, together with its read-pair, gathered in two files + pattern: "*.{fastq}" + - trimmed: + type: file + description: The quality controlled retrieved fragments from each input file. + pattern: "*.{fasta}" + - spades: + type: directory + description: The output from the SPAdes assembly + pattern: "spades_assembly" + - metagenome: + type: file + description: The FASTQ to FASTA converted input files from metagenomic reads. + pattern: "*.{fasta}" + - tmp: + type: file + description: The from FASTQ to FASTA converted input files and their translated input sequences. Are only saved if option --store-peptides is used. 
+ pattern: "*.{fasta}" + + +authors: + - "@louperelo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 86e4fe22..d05d6155 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -390,6 +390,10 @@ expansionhunter: - modules/expansionhunter/** - tests/modules/expansionhunter/** +fargene: + - modules/fargene/** + - tests/modules/fargene/** + fastani: - modules/fastani/** - tests/modules/fastani/** diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf new file mode 100644 index 00000000..f89392ff --- /dev/null +++ b/tests/modules/fargene/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FARGENE } from '../../../modules/fargene/main.nf' addParams( options: [:] ) + +workflow test_fargene { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) ] + hmm_model = 'class_a' + + FARGENE ( input, hmm_model ) +} diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml new file mode 100644 index 00000000..3db6699c --- /dev/null +++ b/tests/modules/fargene/test.yml @@ -0,0 +1,12 @@ +- name: fargene + command: nextflow run tests/modules/fargene -entry test_fargene -c tests/config/nextflow.config + tags: + - fargene + files: + - path: output/fargene/fargene_analysis.log + - path: output/fargene/test/hmmsearchresults/unziped-class_A-hmmsearched.out + - path: output/fargene/test/results_summary.txt + md5sum: 690d351cfc52577263ef4cfab1c81f50 + - path: output/fargene/test/tmpdir/tmp.out + - path: output/fargene/test/tmpdir/unziped-positives.out + md5sum: d41d8cd98f00b204e9800998ecf8427e From 632587a7fcf6c1d7d71b21560f60f1d4802e5d0e Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 15 Nov 2021 17:44:12 +0100 Subject: [PATCH 043/101] Add `bamutil/trimbam` (#1060) * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add bamUtil trimBam * Update modules/bamutil/trimbam/main.nf Co-authored-by: Harshil Patel * Update modules/bamutil/trimbam/main.nf * Changes after code-review * YAML lint Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry --- modules/bamutil/trimbam/functions.nf | 78 ++++++++++++++++++++++++++ modules/bamutil/trimbam/main.nf | 44 +++++++++++++++ modules/bamutil/trimbam/meta.yml | 51 +++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bamutil/trimbam/main.nf | 15 +++++ tests/modules/bamutil/trimbam/test.yml | 8 +++ 6 files changed, 200 insertions(+) create mode 100644 modules/bamutil/trimbam/functions.nf create mode 100644 modules/bamutil/trimbam/main.nf create mode 100644 modules/bamutil/trimbam/meta.yml create mode 100644 tests/modules/bamutil/trimbam/main.nf create mode 100644 tests/modules/bamutil/trimbam/test.yml diff --git a/modules/bamutil/trimbam/functions.nf b/modules/bamutil/trimbam/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bamutil/trimbam/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name 
using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..60949338 --- /dev/null +++ b/modules/bamutil/trimbam/main.nf @@ -0,0 +1,44 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAMUTIL_TRIMBAM { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::bamutil=1.0.15" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1" + } else { + container "quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1" + } + + input: + tuple val(meta), path(bam), val(trim_left), val(trim_right) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + bam \\ + trimBam \\ + $bam \\ + ${prefix}.bam \\ + $options.args \\ + -L $trim_left \\ + -R $trim_right + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) + END_VERSIONS + """ +} diff --git a/modules/bamutil/trimbam/meta.yml b/modules/bamutil/trimbam/meta.yml new file mode 100644 index 00000000..a91ba0e1 --- /dev/null +++ b/modules/bamutil/trimbam/meta.yml @@ -0,0 +1,51 @@ +name: bamutil_trimbam +description: trims the end of reads in a SAM/BAM file, changing read ends to ‘N’ and quality to ‘!’, or by soft clipping +keywords: + - bam + - trim + - clipping + - bamUtil + - trimBam +tools: + - bamutil: + description: Programs that perform operations on SAM/BAM files, all built into a single executable, bam. + homepage: https://genome.sph.umich.edu/wiki/BamUtil + documentation: https://genome.sph.umich.edu/wiki/BamUtil:_trimBam + tool_dev_url: https://github.com/statgen/bamUtil + doi: "10.1101/gr.176552.114" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - trim_left: + type: integer + description: Number of bases to trim off the left-hand side of a read. Reverse strands are reversed before trimming. + - trim_right: + type: integer + description: Number of bases to trim off the right-hand side of a read. Reverse strands are reversed before trimming. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Trimmed but unsorted BAM file + pattern: "*.bam" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d05d6155..13ef5868 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -46,6 +46,10 @@ bamtools/split: - modules/bamtools/split/** - tests/modules/bamtools/split/** +bamutil/trimbam: + - modules/bamutil/trimbam/** + - tests/modules/bamutil/trimbam/** + bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** diff --git a/tests/modules/bamutil/trimbam/main.nf b/tests/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..3699756c --- /dev/null +++ b/tests/modules/bamutil/trimbam/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' addParams( options: [:] ) + +workflow test_bamutil_trimbam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + 2, + 2 ] + + BAMUTIL_TRIMBAM ( input ) +} diff --git a/tests/modules/bamutil/trimbam/test.yml b/tests/modules/bamutil/trimbam/test.yml new file mode 100644 index 00000000..95ddc3b3 --- /dev/null +++ b/tests/modules/bamutil/trimbam/test.yml @@ -0,0 +1,8 @@ +- name: bamutil trimbam test_bamutil_trimbam + command: nextflow run tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c tests/config/nextflow.config + tags: + - bamutil/trimbam + - bamutil + files: + - path: output/bamutil/test.bam + md5sum: 9ddd0ecca82f7f3433383f3d1308970e From 5b1ce484b98bc8a5217c352f293543febdffcca4 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Mon, 15 Nov 2021 17:49:20 +0100 Subject: [PATCH 044/101] feat: _idx -> _tbi (#1074) Co-authored-by: FriederikeHanssen --- modules/gatk4/getpileupsummaries/main.nf | 2 +- modules/gatk4/getpileupsummaries/meta.yml | 2 +- modules/gatk4/mutect2/main.nf | 4 +-- modules/gatk4/mutect2/meta.yml | 8 ++--- .../modules/gatk4/getpileupsummaries/main.nf | 8 ++--- tests/modules/gatk4/mutect2/main.nf | 30 +++++++++---------- 6 files changed, 27 insertions(+), 27 deletions(-) diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 7919678c..f08d4d91 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -21,7 +21,7 @@ process GATK4_GETPILEUPSUMMARIES { input: tuple val(meta), path(bam), path(bai) path variants - path variants_idx + path variants_tbi path sites output: diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index 70158a8d..0add299b 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -35,7 +35,7 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. Is also used as sites file if no separate sites file is specified. pattern: "*.vcf.gz" - - variants_idx: + - variants_tbi: type: file description: Index file for the germline resource. 
pattern: "*.vcf.gz.tbi" diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 748b1673..dd8da406 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -28,9 +28,9 @@ process GATK4_MUTECT2 { path fastaidx path dict path germline_resource - path germline_resource_idx + path germline_resource_tbi path panel_of_normals - path panel_of_normals_idx + path panel_of_normals_tbi output: tuple val(meta), path("*.vcf.gz") , emit: vcf diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 44601e41..4a49b07a 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -66,18 +66,18 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. pattern: "*.vcf.gz" - - germline_resource_idx: + - germline_resource_tbi: type: file description: Index file for the germline resource. - pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" - panel_of_normals: type: file description: vcf file to be used as a panel of normals. pattern: "*.vcf.gz" - - panel_of_normals_idx: + - panel_of_normals_tbi: type: file description: Index for the panel of normals. 
- pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" output: - vcf: diff --git a/tests/modules/gatk4/getpileupsummaries/main.nf b/tests/modules/gatk4/getpileupsummaries/main.nf index 0c7d3fb6..66ee4990 100644 --- a/tests/modules/gatk4/getpileupsummaries/main.nf +++ b/tests/modules/gatk4/getpileupsummaries/main.nf @@ -11,10 +11,10 @@ workflow test_gatk4_getpileupsummaries_just_variants { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + variants_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) sites = [] - GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_tbi , sites ) } workflow test_gatk4_getpileupsummaries_separate_sites { @@ -24,8 +24,8 @@ workflow test_gatk4_getpileupsummaries_separate_sites { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ] variants = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - variants_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + variants_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) sites = file( "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.interval_list" , checkIfExists: true) - GATK4_GETPILEUPSUMMARIES ( input , variants , variants_idx , sites ) + GATK4_GETPILEUPSUMMARIES ( input , variants , variants_tbi , sites ) } diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 293739e4..a3821b64 
100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -20,11 +20,11 @@ workflow test_gatk4_mutect2_tumor_normal_pair { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_tumor_single { @@ -41,11 +41,11 @@ workflow test_gatk4_mutect2_tumor_single { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = 
file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_cram_input { @@ -62,11 +62,11 @@ workflow test_gatk4_mutect2_cram_input { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) - germline_resource_idx = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) - panel_of_normals_idx = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals_tbi = 
file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } workflow test_gatk4_mutect2_generate_pon { @@ -83,11 +83,11 @@ workflow test_gatk4_mutect2_generate_pon { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] - germline_resource_idx = [] + germline_resource_tbi = [] panel_of_normals = [] - panel_of_normals_idx = [] + panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } // mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. 
@@ -105,9 +105,9 @@ workflow test_gatk4_mutect2_mitochondria { fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] - germline_resource_idx = [] + germline_resource_tbi = [] panel_of_normals = [] - panel_of_normals_idx = [] + panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_idx , panel_of_normals , panel_of_normals_idx ) + GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) } From 34268347447cd60013b69279a42aa9d081592735 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Mon, 15 Nov 2021 18:03:02 +0100 Subject: [PATCH 045/101] feat: fastaidx -> fai (#1073) --- modules/gatk4/applybqsr/main.nf | 2 +- modules/gatk4/applybqsr/meta.yml | 5 +++- modules/gatk4/baserecalibrator/main.nf | 2 +- modules/gatk4/baserecalibrator/meta.yml | 5 +++- .../gatk4/createsomaticpanelofnormals/main.nf | 2 +- .../createsomaticpanelofnormals/meta.yml | 4 ++-- modules/gatk4/filtermutectcalls/main.nf | 2 +- modules/gatk4/filtermutectcalls/meta.yml | 4 ++-- modules/gatk4/mutect2/main.nf | 2 +- modules/gatk4/mutect2/meta.yml | 4 ++-- .../nf-core/gatk_create_som_pon/main.nf | 6 ++--- .../nf-core/gatk_create_som_pon/meta.yml | 4 ++-- tests/modules/gatk4/filtermutectcalls/main.nf | 18 +++++++------- tests/modules/gatk4/mutect2/main.nf | 24 +++++++++---------- .../nf-core/gatk_create_som_pon/main.nf | 5 ++-- 15 files changed, 47 insertions(+), 42 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index e1a4d7b4..c89a4a4d 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -21,7 +21,7 @@ process GATK4_APPLYBQSR { 
input: tuple val(meta), path(input), path(input_index), path(bqsr_table) path fasta - path fastaidx + path fai path dict path intervals diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index e7419860..4e3b2f9a 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -34,12 +34,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index ff9eb1f9..ce6f5906 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -21,7 +21,7 @@ process GATK4_BASERECALIBRATOR { input: tuple val(meta), path(input), path(input_index) path fasta - path fastaidx + path fai path dict path intervalsBed path knownSites diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 7fd273e1..188340b4 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -31,12 +31,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index b3685171..49136256 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ 
-21,7 +21,7 @@ process GATK4_CREATESOMATICPANELOFNORMALS { input: tuple val(meta), path(genomicsdb) path fasta - path fastaidx + path fai path dict output: diff --git a/modules/gatk4/createsomaticpanelofnormals/meta.yml b/modules/gatk4/createsomaticpanelofnormals/meta.yml index f0199ed6..e450c68a 100644 --- a/modules/gatk4/createsomaticpanelofnormals/meta.yml +++ b/modules/gatk4/createsomaticpanelofnormals/meta.yml @@ -28,10 +28,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index b54e07ed..6e10ff0f 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -21,7 +21,7 @@ process GATK4_FILTERMUTECTCALLS { input: tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) path fasta - path fastaidx + path fai path dict output: diff --git a/modules/gatk4/filtermutectcalls/meta.yml b/modules/gatk4/filtermutectcalls/meta.yml index f14f9404..7d85e2b9 100644 --- a/modules/gatk4/filtermutectcalls/meta.yml +++ b/modules/gatk4/filtermutectcalls/meta.yml @@ -53,10 +53,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index dd8da406..e0e2661b 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -25,7 +25,7 @@ process GATK4_MUTECT2 { val run_mito val interval_label path fasta - path fastaidx + path fai path dict path germline_resource 
path germline_resource_tbi diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 4a49b07a..83f6cb7c 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -54,10 +54,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf index 9b190584..40269a4a 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -14,7 +14,7 @@ workflow GATK_CREATE_SOM_PON { take: ch_mutect2_in // channel: [ val(meta), [ input ], [ input_index ], [] ] fasta // channel: /path/to/reference/fasta - fastaidx // channel: /path/to/reference/fasta/index + fai // channel: /path/to/reference/fasta/index dict // channel: /path/to/reference/fasta/dictionary pon_name // channel: name for panel of normals interval_file // channel: /path/to/interval/file @@ -25,7 +25,7 @@ workflow GATK_CREATE_SOM_PON { // //Perform variant calling for each sample using mutect2 module in panel of normals mode. // - GATK4_MUTECT2 ( input , false , true, false , [] , fasta , fastaidx , dict , [], [] , [] , [] ) + GATK4_MUTECT2 ( input, false, true, false, [], fasta, fai, dict, [], [], [], [] ) ch_versions = ch_versions.mix(GATK4_MUTECT2.out.versions.first()) // @@ -41,7 +41,7 @@ workflow GATK_CREATE_SOM_PON { //Panel of normals made from genomicsdb workspace using createsomaticpanelofnormals. 
// GATK4_GENOMICSDBIMPORT.out.genomicsdb.view() - GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fastaidx, dict ) + GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fai, dict ) ch_versions = ch_versions.mix(GATK4_CREATESOMATICPANELOFNORMALS.out.versions.first()) emit: diff --git a/subworkflows/nf-core/gatk_create_som_pon/meta.yml b/subworkflows/nf-core/gatk_create_som_pon/meta.yml index bc02b885..07404aae 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/meta.yml +++ b/subworkflows/nf-core/gatk_create_som_pon/meta.yml @@ -30,10 +30,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf index a425238b..5b2938e8 100644 --- a/tests/modules/gatk4/filtermutectcalls/main.nf +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -6,7 +6,7 @@ include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutect workflow test_gatk4_filtermutectcalls_base { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -18,15 +18,15 @@ workflow test_gatk4_filtermutectcalls_base { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - 
GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_with_files { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -38,15 +38,15 @@ workflow test_gatk4_filtermutectcalls_with_files { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_use_val { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -58,8 +58,8 @@ workflow test_gatk4_filtermutectcalls_use_val { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } diff --git a/tests/modules/gatk4/mutect2/main.nf 
b/tests/modules/gatk4/mutect2/main.nf index a3821b64..e163cf9c 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -8,8 +8,8 @@ include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gat workflow test_gatk4_mutect2_tumor_normal_pair { input = [ [ id:'test'], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], ["testN"] ] run_single = false @@ -17,14 +17,14 @@ workflow test_gatk4_mutect2_tumor_normal_pair { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = 
file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_TEMPFIX_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_TEMPFIX_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_tumor_single { @@ -38,14 +38,14 @@ workflow test_gatk4_mutect2_tumor_single { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, 
interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_cram_input { @@ -59,14 +59,14 @@ workflow test_gatk4_mutect2_cram_input { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input , run_single , run_pon , run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_generate_pon { @@ -80,14 +80,14 @@ workflow test_gatk4_mutect2_generate_pon { run_mito = false interval_label = [] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = 
file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] germline_resource_tbi = [] panel_of_normals = [] panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } // mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. @@ -102,12 +102,12 @@ workflow test_gatk4_mutect2_mitochondria { run_mito = true interval_label = 'chr22' fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) germline_resource = [] germline_resource_tbi = [] panel_of_normals = [] panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input , run_single , run_pon, run_mito , interval_label , fasta , fastaidx , dict , germline_resource, germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, interval_label, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf index d484ac2f..42427a1f 100644 --- 
a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -16,11 +16,10 @@ workflow test_gatk_create_som_pon { [] ] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) pon_name = "test_panel" interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) - GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fastaidx, dict, pon_name, interval_file ) - + GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fai, dict, pon_name, interval_file ) } From 4398056204b7c9685bc3e0888e82a315031b1f32 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 12:17:40 -0500 Subject: [PATCH 046/101] Macs2 calllpeak (#1038) * Add tests and yml file for macs2/callpeak * add format option for macs2 * update macs2/callpeak to accept format argument * update test.yml * update the container version. * try to fix the issue in conda container. 
* Update conda and containers * Going back to previous container versions Co-authored-by: JoseEspinosa --- modules/macs2/callpeak/main.nf | 13 ++++-- modules/macs2/callpeak/meta.yml | 63 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/macs2/callpeak/main.nf | 31 +++++++++++++ tests/modules/macs2/callpeak/test.yml | 38 ++++++++++++++++ 5 files changed, 146 insertions(+), 3 deletions(-) create mode 100644 modules/macs2/callpeak/meta.yml create mode 100644 tests/modules/macs2/callpeak/main.nf create mode 100644 tests/modules/macs2/callpeak/test.yml diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index d54d406d..94f8945b 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -13,9 +13,9 @@ process MACS2_CALLPEAK { conda (params.enable_conda ? "bioconda::macs2=2.2.7.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h0213d0e_1" + container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3" } else { - container "quay.io/biocontainers/macs2:2.2.7.1--py38h0213d0e_1" + container "quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3" } input: @@ -33,12 +33,19 @@ process MACS2_CALLPEAK { script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = options.args.tokenize() def format = meta.single_end ? 'BAM' : 'BAMPE' def control = controlbam ? 
"--control $controlbam" : '' + if(args.contains('--format')){ + def id = args.findIndexOf{it=='--format'} + format = args[id+1] + args.remove(id+1) + args.remove(id) + } """ macs2 \\ callpeak \\ - $options.args \\ + ${args.join(' ')} \\ --gsize $macs2_gsize \\ --format $format \\ --name $prefix \\ diff --git a/modules/macs2/callpeak/meta.yml b/modules/macs2/callpeak/meta.yml new file mode 100644 index 00000000..afb949ec --- /dev/null +++ b/modules/macs2/callpeak/meta.yml @@ -0,0 +1,63 @@ +name: macs2_callpeak +description: Peak calling of enriched genomic regions of ChIP-seq and ATAC-seq experiments +keywords: + - alignment + - atac-seq + - chip-seq + - peak-calling +tools: + - macs2: + description: Model Based Analysis for ChIP-Seq data + homepage: None + documentation: https://docs.csc.fi/apps/macs2/ + tool_dev_url: https://github.com/macs3-project/MACS + doi: "https://doi.org/10.1101/496521" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ipbam: + type: file + description: The ChIP-seq treatment file + - controlbam: + type: file + description: The control file + - macs2_gsize: + type: string + description: Effective genome size. It can be 1.0e+9 or 1000000000, or shortcuts:'hs' for human (2.7e9), + 'mm' for mouse (1.87e9), 'ce' for C. 
elegans (9e7) and 'dm' for fruitfly (1.2e8) + +output: + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - peak: + type: file + description: BED file containing annotated peaks + pattern: "*.gappedPeak,*.narrowPeak}" + - xls: + type: file + description: xls file containing annotated peaks + pattern: "*.xls" + - gapped: + type: file + description: Optional BED file containing gapped peak + pattern: "*.gappedPeak" + - bed: + type: file + description: Optional BED file containing peak summits locations for every peak + pattern: "*.bed" + - bdg: + type: file + description: Optional bedGraph files for input and treatment input samples + pattern: "*.bdg" + +authors: + - "@ntoda03" + - "@JoseEspinosa" + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 13ef5868..6c0b7b34 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -778,6 +778,10 @@ lofreq/indelqual: - modules/lofreq/indelqual/** - tests/modules/lofreq/indelqual/** +macs2/callpeak: + - modules/macs2/callpeak/** + - tests/modules/macs2/callpeak/** + malt/build: - modules/malt/build/** - tests/modules/malt/build_test/** diff --git a/tests/modules/macs2/callpeak/main.nf b/tests/modules/macs2/callpeak/main.nf new file mode 100644 index 00000000..db598564 --- /dev/null +++ b/tests/modules/macs2/callpeak/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) +include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) +include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--format BED --qval 1 --nomodel --extsize 200"] ) + +workflow test_macs2_callpeak_bed { + input = [ [ id:'test', 
single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['pacbio']['genemodel1'], checkIfExists: true)], + []] + + MACS2_CALLPEAK_BED ( input, 4000 ) +} + +workflow test_macs2_callpeak { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ], + []] + + MACS2_CALLPEAK ( input, 40000 ) +} + +workflow test_macs2_callpeak_ctrl { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ], + [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ]] + + MACS2_CALLPEAK_CTRL ( input, 40000 ) +} diff --git a/tests/modules/macs2/callpeak/test.yml b/tests/modules/macs2/callpeak/test.yml new file mode 100644 index 00000000..424a9746 --- /dev/null +++ b/tests/modules/macs2/callpeak/test.yml @@ -0,0 +1,38 @@ +- name: macs2 callpeak test_macs2_callpeak_bed + command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c tests/config/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/macs2/test_peaks.xls + md5sum: 762383e3a35e1f9ac3834fd6b2926092 + - path: output/macs2/test_summits.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: macs2 callpeak test_macs2_callpeak + command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak -c tests/config/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 2e4da1c1704595e12aaf99cc715ad70c + - path: output/macs2/test_peaks.xls + md5sum: 5d65cb3dbd5421ea3bb5b490a100e9a4 + - path: output/macs2/test_summits.bed + md5sum: 26f0f97b6c14dbca129e947a58067c82 + +- name: macs2 callpeak test_macs2_callpeak_ctrl + command: nextflow run 
tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c tests/config/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 653e1108cc57ca07d0f60fc0f4fb8ba3 + - path: output/macs2/test_peaks.xls + md5sum: bf86546faa7b581b5209c29b22046a0a + - path: output/macs2/test_summits.bed + md5sum: 4f3c7c53a1d730d90d1b3dd9d3197af4 From 8d9e8ae839df0e6f0070f8615e69f3103f9f3359 Mon Sep 17 00:00:00 2001 From: avantonder Date: Mon, 15 Nov 2021 17:26:37 +0000 Subject: [PATCH 047/101] Add Medaka module (#992) * add racon * add medaka module * add medaka module * add medaka module * add medaka module * add medaka module * add medaka module * Indentation * Apply suggestions from code review Co-authored-by: FriederikeHanssen * Update main.nf * Update main.nf * Apply suggestions from code review Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/medaka/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/medaka/main.nf | 47 ++++++++++++++++++++ modules/medaka/meta.yml | 47 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/medaka/main.nf | 16 +++++++ tests/modules/medaka/test.yml | 7 +++ 6 files changed, 199 insertions(+) create mode 100644 modules/medaka/functions.nf create mode 100644 modules/medaka/main.nf create mode 100644 modules/medaka/meta.yml create mode 100644 tests/modules/medaka/main.nf create mode 100644 tests/modules/medaka/test.yml diff --git a/modules/medaka/functions.nf b/modules/medaka/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/medaka/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def 
getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf new file mode 100644 index 00000000..a0db4150 --- /dev/null +++ b/modules/medaka/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MEDAKA { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::medaka=1.4.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0" + } else { + container "quay.io/biocontainers/medaka:1.4.4--py38h130def0_0" + } + + input: + tuple val(meta), path(reads), path(assembly) + + output: + tuple val(meta), path("*.fa.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + medaka_consensus \\ + -t $task.cpus \\ + $options.args \\ + -i $reads \\ + -d $assembly \\ + -o ./ + + mv consensus.fasta ${prefix}.fa + + gzip -n ${prefix}.fa + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( medaka --version 2>&1 | sed 's/medaka //g' ) + END_VERSIONS + """ +} diff --git a/modules/medaka/meta.yml b/modules/medaka/meta.yml new file mode 100644 index 00000000..d194464f --- /dev/null +++ b/modules/medaka/meta.yml @@ -0,0 +1,47 @@ +name: medaka +description: A tool to create consensus sequences and variant calls from nanopore sequencing data +keywords: + - assembly + - polishing + - nanopore +tools: + - medaka: + description: Neural network sequence error correction. + homepage: https://nanoporetech.github.io/medaka/index.html + documentation: https://nanoporetech.github.io/medaka/index.html + tool_dev_url: https://github.com/nanoporetech/medaka + doi: "" + licence: ['Mozilla Public License 2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input nanopore fasta/FastQ files + pattern: "*.{fasta,fa,fastq,fastq.gz,fq,fq.gz}" + - assembly: + type: file + description: Genome assembly + pattern: "*.{fasta,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - assembly: + type: file + description: Polished genome assembly + pattern: "*.fa.gz" + +authors: + - "@avantonder" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6c0b7b34..b286f114 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -822,6 +822,10 @@ maxbin2: - modules/maxbin2/** - tests/modules/maxbin2/** +medaka: + - modules/medaka/** + - tests/modules/medaka/** + megahit: - modules/megahit/** - tests/modules/megahit/** diff --git a/tests/modules/medaka/main.nf b/tests/modules/medaka/main.nf new file mode 100644 index 00000000..300e086b --- /dev/null +++ b/tests/modules/medaka/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MEDAKA } from '../../../modules/medaka/main.nf' addParams( options: [suffix:'.polished.genome'] ) + +workflow test_medaka { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + MEDAKA ( input ) +} diff --git a/tests/modules/medaka/test.yml b/tests/modules/medaka/test.yml new file mode 100644 index 00000000..9ce5521e --- /dev/null +++ b/tests/modules/medaka/test.yml @@ -0,0 +1,7 @@ +- name: medaka test_medaka + command: nextflow run ./tests/modules/medaka -entry test_medaka -c tests/config/nextflow.config + tags: + - medaka + files: + - path: output/medaka/test.polished.genome.fa.gz + md5sum: f42303f1d6c2c79175faeb00e10b9a6e \ No newline at end of file From 466b964b37b7241a83fff9c3d7ddc14ceada20ff Mon Sep 17 00:00:00 2001 From: mjakobs <25904555+mjakobs@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:40:46 +0000 Subject: [PATCH 048/101] add Kronatools KTImportTaxonomy (#928) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * created and initialised krona module * Added kronatools/ktimporttaxonomy module * removing previous redundant work * added contains info for html * edited contains in test.yml * Update get versions Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * remove old syntax Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * rewording module description Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * added detailed keywords Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * update syntax and tool version * fixed meta.yml issues * remove contains line from test.yml * re-wrote module after nf-core/tools update - should work now * removed md5 * Update modules/kronatools/ktimporttaxonomy/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * update meta save * removed typo * double quotes to single quotes around html * re-ran test, which updated md5 * removed md5 * 'classifier' removed to fix linting * update version * removed erroneous ktimporttaxonomy2 * Updated input to include meta and database * fixed tab issues in yaml * added `contains` to test.yml * edited `contains` in test.yml * trying another `contains` * retrying `contains` * contains with extra line * removed classifier from tag * Apply suggestions from code review * Update meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- .../kronatools/ktimporttaxonomy/functions.nf | 78 +++++++++++++++++++ modules/kronatools/ktimporttaxonomy/main.nf | 39 ++++++++++ modules/kronatools/ktimporttaxonomy/meta.yml | 44 +++++++++++ tests/config/pytest_modules.yml | 4 + .../kronatools/ktimporttaxonomy/main.nf | 15 ++++ .../kronatools/ktimporttaxonomy/test.yml | 9 +++ 6 files changed, 189 insertions(+) create mode 100644 
modules/kronatools/ktimporttaxonomy/functions.nf create mode 100644 modules/kronatools/ktimporttaxonomy/main.nf create mode 100644 modules/kronatools/ktimporttaxonomy/meta.yml create mode 100644 tests/modules/kronatools/ktimporttaxonomy/main.nf create mode 100644 tests/modules/kronatools/ktimporttaxonomy/test.yml diff --git a/modules/kronatools/ktimporttaxonomy/functions.nf b/modules/kronatools/ktimporttaxonomy/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish 
versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/kronatools/ktimporttaxonomy/main.nf b/modules/kronatools/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..893bc5b2 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process KRONATOOLS_KTIMPORTTAXONOMY { + tag "${meta.id}" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::krona=2.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2" + } else { + container "quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2" + } + + input: + tuple val(meta), path(report) + path "taxonomy/taxonomy.tab" + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + script: + def VERSION='2.8' + """ + ktImportTaxonomy "$report" -tax taxonomy + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/ktimporttaxonomy/meta.yml b/modules/kronatools/ktimporttaxonomy/meta.yml new file mode 100644 index 00000000..f37f2db4 --- /dev/null +++ b/modules/kronatools/ktimporttaxonomy/meta.yml @@ -0,0 +1,44 @@ +name: kronatools_ktimporttaxonomy +description: KronaTools Import Taxonomy imports taxonomy classifications and produces an interactive Krona plot. +keywords: + - plot + - taxonomy + - interactive + - html + - visualisation + - krona chart +tools: + - kronatools: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - database: + type: path + description: "Path to the taxonomy database downloaded by kronatools/kronadb" + - report: + type: file + description: "A tab-delimited file with taxonomy IDs and (optionally) query IDs, magnitudes, and scores. Query IDs are taken from column 1, taxonomy IDs from column 2, and scores from column 3. 
Lines beginning with # will be ignored." + pattern: "*.{tsv}" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - html: + type: file + description: A html file containing an interactive krona plot. + pattern: "*.{html}" + +authors: + - "@mjakobs" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b286f114..b8e5e3d3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -718,6 +718,10 @@ kronatools/kronadb: - modules/kronatools/kronadb/** - tests/modules/kronatools/kronadb/** +kronatools/ktimporttaxonomy: + - modules/kronatools/ktimporttaxonomy/** + - tests/modules/kronatools/ktimporttaxonomy/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** diff --git a/tests/modules/kronatools/ktimporttaxonomy/main.nf b/tests/modules/kronatools/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..d7b08a2f --- /dev/null +++ b/tests/modules/kronatools/ktimporttaxonomy/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONATOOLS_KTIMPORTTAXONOMY } from '../../../../modules/kronatools/ktimporttaxonomy/main.nf' addParams( options: [:] ) + +workflow test_kronatools_ktimporttaxonomy { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) ] + + taxonomy = [ file(params.test_data['generic']['txt']['hello'] , checkIfExists: true) ] + + KRONATOOLS_KTIMPORTTAXONOMY ( input, taxonomy ) +} diff --git a/tests/modules/kronatools/ktimporttaxonomy/test.yml b/tests/modules/kronatools/ktimporttaxonomy/test.yml new file mode 100644 index 00000000..15882b2e --- /dev/null +++ b/tests/modules/kronatools/ktimporttaxonomy/test.yml @@ -0,0 +1,9 @@ +- name: kronatools ktimporttaxonomy test_kronatools_ktimporttaxonomy + command: nextflow run tests/modules/kronatools/ktimporttaxonomy -entry test_kronatools_ktimporttaxonomy -c 
tests/config/nextflow.config + tags: + - kronatools/ktimporttaxonomy + - kronatools + files: + - path: output/kronatools/taxonomy.krona.html + contains: + - "DOCTYPE html PUBLIC" From 527ccdb4198a964d09ba43b1b33ef4de3f40cfcf Mon Sep 17 00:00:00 2001 From: avantonder Date: Mon, 15 Nov 2021 17:50:56 +0000 Subject: [PATCH 049/101] Add Miniasm module (#962) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add racon * add miniasm module * edit miniasm module * edit miniasm module * Update tests/modules/racon/main.nf Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update tests/modules/racon/test.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update modules/miniasm/meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update main.nf Add some spaces. * Update meta.yml Correct DOI * Update main.nf * Apply suggestions from code review * Update tests/modules/miniasm/test.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- modules/miniasm/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/miniasm/main.nf | 48 ++++++++++++++++++++ modules/miniasm/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/miniasm/main.nf | 15 +++++++ tests/modules/miniasm/test.yml | 9 ++++ 6 files changed, 205 insertions(+) create mode 100644 modules/miniasm/functions.nf create mode 100644 modules/miniasm/main.nf create mode 100644 modules/miniasm/meta.yml create mode 100644 tests/modules/miniasm/main.nf create mode 100644 tests/modules/miniasm/test.yml diff --git a/modules/miniasm/functions.nf b/modules/miniasm/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/miniasm/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of 
software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf new file mode 100644 index 00000000..d2652fab --- /dev/null +++ b/modules/miniasm/main.nf @@ -0,0 +1,48 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process MINIASM { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::miniasm=0.3_r179" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2" + } else { + container "quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2" + } + + input: + tuple val(meta), path(reads), path(paf) + + output: + tuple val(meta), path("*.gfa.gz") , emit: gfa + tuple val(meta), path("*.fasta.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + miniasm \\ + $options.args \\ + -f $reads \\ + $paf > \\ + ${prefix}.gfa + + awk '/^S/{print ">"\$2"\\n"\$3}' "${prefix}.gfa" | fold > ${prefix}.fasta + + gzip -n ${prefix}.gfa + gzip -n ${prefix}.fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( miniasm -V 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/miniasm/meta.yml b/modules/miniasm/meta.yml new file mode 100644 index 00000000..e8aedb9a --- /dev/null +++ b/modules/miniasm/meta.yml @@ -0,0 +1,51 @@ +name: miniasm +description: A very fast OLC-based de novo assembler for noisy long reads +keywords: + - assembly + - pacbio + - nanopore +tools: + - miniasm: + description: Ultrafast de novo assembly for long noisy reads (though having no consensus step) + homepage: https://github.com/lh3/miniasm + documentation: https://github.com/lh3/miniasm + tool_dev_url: https://github.com/lh3/miniasm + doi: "10.1093/bioinformatics/btw152" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input PacBio/ONT FastQ files. + pattern: "*.{fastq,fastq.gz,fq,fq.gz}" + - paf: + type: file + description: Alignment in PAF format + pattern: "*{.paf,.paf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gfa: + type: file + description: Assembly graph + pattern: "*.gfa.gz" + - assembly: + type: file + description: Genome assembly + pattern: "*.fasta.gz" + +authors: + - "@avantonder" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b8e5e3d3..d925b76d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -862,6 +862,10 @@ minia: - modules/minia/** - tests/modules/minia/** +miniasm: + - modules/miniasm/** + - tests/modules/miniasm/** + minimap2/align: - modules/minimap2/align/** - tests/modules/minimap2/align/** diff --git a/tests/modules/miniasm/main.nf b/tests/modules/miniasm/main.nf new file mode 100644 index 00000000..f3d23d56 --- /dev/null +++ b/tests/modules/miniasm/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MINIASM } from '../../../modules/miniasm/main.nf' addParams( options: [suffix:'.assembly'] ) + +workflow test_miniasm { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['nanopore']['overlap_paf'], checkIfExists: true) + ] + + MINIASM ( input ) +} diff --git a/tests/modules/miniasm/test.yml b/tests/modules/miniasm/test.yml new file mode 100644 index 00000000..7596a269 --- /dev/null +++ b/tests/modules/miniasm/test.yml @@ -0,0 +1,9 @@ +- name: miniasm test_miniasm + command: nextflow run tests/modules/miniasm -entry test_miniasm -c tests/config/nextflow.config + tags: + - miniasm + files: + - path: output/miniasm/test.assembly.gfa.gz + md5sum: c68e4c2b64338d1c0f5b79b32934da14 + - path: output/miniasm/test.assembly.fasta.gz + md5sum: d2f78ae618c02744e7a57bf4706ab8b4 From 2af071ed0d70ec7fa25b08e3fa48ea6fd9564a38 Mon Sep 17 00:00:00 2001 From: Edmund Miller 
Date: Mon, 15 Nov 2021 12:18:46 -0600 Subject: [PATCH 050/101] Fix subworkflows seperate from modules (#933) * ci: Remove pytest_subworkflows * ci(bam_sort_samtools): Depend on paths-filter instead of pytest-workflow Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Jose Espinosa-Carrasco * ci: Revert back to one job branch * ci(align_bowtie2): Run tests that depend on bam_sort_samtools * ci: Fix anchor not being created yet * ci: Update sra_fastq tags and pytest_modules * fix(bam_sort_samtools): Update nextflow.config with params * test(subworkflows): Update gatk_create_som_pon tags * ci: Point to subworkflow_hacks branch of nf-core tools Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Jose Espinosa-Carrasco --- .github/workflows/nf-core-linting.yml | 2 +- .github/workflows/pytest-workflow.yml | 118 +----------------- .../nf-core/bam_sort_samtools/nextflow.config | 4 +- tests/config/pytest_modules.yml | 40 +++++- tests/config/pytest_subworkflows.yml | 21 ---- tests/modules/sratools/fasterqdump/test.yml | 6 +- .../nf-core/align_bowtie2/test.yml | 32 ++--- .../nf-core/bam_sort_samtools/test.yml | 24 ++-- .../nf-core/gatk_create_som_pon/test.yml | 7 +- 9 files changed, 77 insertions(+), 177 deletions(-) delete mode 100644 tests/config/pytest_subworkflows.yml diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 55b8c296..ce441413 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -60,7 +60,7 @@ jobs: # FIXME: Remove this when nf-core modules lint stabilizes and install stable release - name: Install nf-core tools development version - run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev + run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@subworkflow_hacks - name: Install Nextflow env: diff --git a/.github/workflows/pytest-workflow.yml 
b/.github/workflows/pytest-workflow.yml index 0b509527..7cbb2689 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -6,11 +6,8 @@ on: branches: [master] jobs: - ########### - # Modules # - ########### - module_changes: - name: Check for changes in the modules + changes: + name: Check for changes runs-on: ubuntu-latest outputs: # Expose matched filters as job 'modules' output variable @@ -23,120 +20,17 @@ jobs: with: filters: "tests/config/pytest_modules.yml" - module_test: + test: runs-on: ubuntu-20.04 name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: module_changes - if: needs.module_changes.outputs.modules != '[]' + needs: changes + if: needs.changes.outputs.modules != '[]' strategy: fail-fast: false matrix: nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.module_changes.outputs.modules) }} - profile: ["docker", "singularity", "conda"] - env: - NXF_ANSI_LOG: false - steps: - - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - name: Install Python dependencies - run: python -m pip install --upgrade pip pytest-workflow - - - uses: actions/cache@v2 - with: - path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} - restore-keys: | - ${{ runner.os }}-nextflow- - - - name: Install Nextflow - env: - NXF_VER: ${{ matrix.nxf_version }} - CAPSULE_LOG: none - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - - name: Set up Singularity - if: matrix.profile == 'singularity' - uses: eWaterCycle/setup-singularity@v5 - with: - singularity-version: 3.7.1 - - - name: Setup miniconda - if: matrix.profile == 
'conda' - uses: conda-incubator/setup-miniconda@v2 - with: - auto-update-conda: true - channels: conda-forge,bioconda,defaults - python-version: ${{ matrix.python-version }} - - - name: Conda clean - if: matrix.profile == 'conda' - run: conda clean -a - - # Test the module - - name: Run pytest-workflow - # only use one thread for pytest-workflow to avoid race condition on conda cache. - run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - - - name: Upload logs on failure - if: failure() - uses: actions/upload-artifact@v2 - with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} - path: | - /home/runner/pytest_workflow_*/*/.nextflow.log - /home/runner/pytest_workflow_*/*/log.out - /home/runner/pytest_workflow_*/*/log.err - /home/runner/pytest_workflow_*/*/work - - ################ - # Subworkflows # - ################ - subworkflow_changes: - name: Check for changes in the subworkflows - runs-on: ubuntu-latest - outputs: - # Expose matched filters as job 'subworkflows' output variable - subworkflows: ${{ steps.filter.outputs.changes }} - steps: - - uses: actions/checkout@v2 - - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: "tests/config/pytest_subworkflows.yml" - - subworkflow_test: - runs-on: ubuntu-20.04 - - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: subworkflow_changes - if: needs.subworkflow_changes.outputs.subworkflows != '[]' - strategy: - fail-fast: false - matrix: - nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }} + tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false diff --git a/subworkflows/nf-core/bam_sort_samtools/nextflow.config b/subworkflows/nf-core/bam_sort_samtools/nextflow.config index 2fd55747..72128aad 100644 --- a/subworkflows/nf-core/bam_sort_samtools/nextflow.config +++ 
b/subworkflows/nf-core/bam_sort_samtools/nextflow.config @@ -1 +1,3 @@ -params.options = [:] +params.sort_options = [:] +params.index_options = [:] +params.stats_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d925b76d..baaee3b8 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -466,7 +466,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: +gatk4/createsomaticpanelofnormals: &gatk4/createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -482,7 +482,7 @@ gatk4/filtermutectcalls: - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: +gatk4/genomicsdbimport: &gatk4/genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -1163,7 +1163,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: +samtools/index: &samtools/index - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1175,7 +1175,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: +samtools/sort: &samtools/sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1251,11 +1251,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: +sratools/fasterqdump: &sratools/fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: +sratools/prefetch: &sratools/prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1374,3 +1374,31 @@ yara/index: yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** + +subworkflows/bam_stats_samtools: &subworkflows/bam_stats_samtools + - subworkflows/nf-core/bam_stats_samtools/** 
+ - tests/subworkflows/nf-core/bam_stats_samtools/** + +subworkflows/bam_sort_samtools: &subworkflows/bam_sort_samtools + - subworkflows/nf-core/bam_sort_samtools/** + - tests/subworkflows/nf-core/bam_sort_samtools/** + - *samtools/sort + - *samtools/index + - *subworkflows/bam_stats_samtools + +subworkflows/align_bowtie2: + - subworkflows/nf-core/align_bowtie2/** + - tests/subworkflows/nf-core/align_bowtie2/** + - *subworkflows/bam_sort_samtools + +subworkflows/sra_fastq: + - subworkflows/nf-core/sra_fastq/** + - tests/subworkflows/nf-core/sra_fastq/** + - *sratools/fasterqdump + - *sratools/prefetch + +subworkflows/gatk_create_som_pon: + - subworkflows/nf-core/gatk_create_som_pon/** + - tests/subworkflows/nf-core/gatk_create_som_pon/** + - *gatk4/genomicsdbimport + - *gatk4/createsomaticpanelofnormals diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml deleted file mode 100644 index 4f9c5514..00000000 --- a/tests/config/pytest_subworkflows.yml +++ /dev/null @@ -1,21 +0,0 @@ -subworkflows/align_bowtie2: - - subworkflows/nf-core/align_bowtie2/** - - tests/subworkflows/nf-core/align_bowtie2/** - -subworkflows/bam_stats_samtools: - - subworkflows/nf-core/bam_stats_samtools/** - - tests/subworkflows/nf-core/bam_stats_samtools/** - -subworkflows/bam_sort_samtools: - - subworkflows/nf-core/bam_sort_samtools/** - - tests/subworkflows/nf-core/bam_sort_samtools/** - -subworkflows/sra_fastq: - - subworkflows/nf-core/sra_fastq/** - - tests/subworkflows/nf-core/sra_fastq/** - -subworkflows/gatk_create_som_pon: - - subworkflows/nf-core/gatk_create_som_pon/** - - tests/subworkflows/nf-core/gatk_create_som_pon/** - - \ No newline at end of file diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml index 94da4ed8..7d022a0d 100644 --- a/tests/modules/sratools/fasterqdump/test.yml +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -1,8 +1,7 @@ - name: sratools fasterqdump 
test_sratools_fasterqdump_single_end command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config tags: - - sratools - - sratools/fasterqdump + - subworkflows/sra_fastq files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -12,8 +11,7 @@ - name: sratools fasterqdump test_sratools_fasterqdump_paired_end command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config tags: - - sratools - - sratools/fasterqdump + - subworkflows/sra_fastq files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 51261a14..116ea961 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -5,14 +5,14 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log @@ -46,14 +46,14 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log diff --git 
a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index e2fc27d8..88ea9d5a 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -4,12 +4,12 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: e4c77897d6824ce4df486d1b100618af @@ -28,12 +28,12 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: bbb2db225f140e69a4ac577f74ccc90f diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index a4478044..e6d80409 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -2,11 +2,10 @@ command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config tags: - subworkflows/gatk_create_som_pon + - gatk4 # Modules - - gatk4 - - gatk4 - - gatk4/genomicsdbimport - - gatk4/createsomaticpanelofnormals + # - gatk4/genomicsdbimport + # - gatk4/createsomaticpanelofnormals files: # gatk4 mutect2 - path: output/gatk4/test1.vcf.gz From ad460103851f353a373ed6a3064cb27ba1bc622e Mon Sep 17 00:00:00 2001 From: SusiJo <43847534+SusiJo@users.noreply.github.com> Date: Mon, 15 Nov 2021 19:29:55 +0100 Subject: 
[PATCH 051/101] Added new module csvtk/split (#1014) * added module csvtk/split * removed todo statement * adjusted meta map names * changed tests to use generic input files * added module in pytest * updated test-data paths * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/csvtk/split/functions.nf | 78 ++++++++++++++++++++++++++++++ modules/csvtk/split/main.nf | 50 +++++++++++++++++++ modules/csvtk/split/meta.yml | 52 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 10 +++- tests/modules/csvtk/split/main.nf | 27 +++++++++++ tests/modules/csvtk/split/test.yml | 25 ++++++++++ 7 files changed, 244 insertions(+), 2 deletions(-) create mode 100644 modules/csvtk/split/functions.nf create mode 100644 modules/csvtk/split/main.nf create mode 100644 modules/csvtk/split/meta.yml create mode 100644 tests/modules/csvtk/split/main.nf create mode 100644 tests/modules/csvtk/split/test.yml diff --git a/modules/csvtk/split/functions.nf b/modules/csvtk/split/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/csvtk/split/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + 
options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf new file mode 100644 index 00000000..727e046a --- /dev/null +++ b/modules/csvtk/split/main.nf @@ -0,0 +1,50 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CSVTK_SPLIT { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::csvtk=0.23.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" + } else { + container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" + } + + input: + tuple val(meta), path(csv) + val in_format + val out_format + + output: + tuple val(meta), path("*.${out_extension}"), emit: split_csv + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) + def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) + out_extension = out_format == "tsv" ? 
'tsv' : 'csv' + """ + sed -i.bak '/^##/d' $csv + csvtk \\ + split \\ + $options.args \\ + --num-cpus $task.cpus \\ + $delimiter \\ + $out_delimiter \\ + $csv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) + END_VERSIONS + """ +} diff --git a/modules/csvtk/split/meta.yml b/modules/csvtk/split/meta.yml new file mode 100644 index 00000000..45b71d14 --- /dev/null +++ b/modules/csvtk/split/meta.yml @@ -0,0 +1,52 @@ +name: csvtk_split +description: Splits CSV/TSV into multiple files according to column values +keywords: + - split + - csv + - tsv +tools: + - csvtk: + description: + CSVTK is a cross-platform, efficient and practical CSV/TSV toolkit + that allows rapid data investigation and manipulation. + homepage: https://bioinf.shenwei.me/csvtk/ + documentation: https://bioinf.shenwei.me/csvtk/ + tool_dev_url: https://github.com/shenwei356/csvtk + doi: "" + licence: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - csv: + type: file + description: CSV/TSV file + pattern: "*.{csv,tsv}" + - in_format: + type: string + description: Input format (csv, tab, or a delimiting character) + pattern: "*" + - out_format: + type: string + description: Output format (csv, tab, or a delimiting character) + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - split_csv: + type: file + description: Split CSV/TSV file + pattern: "*.{csv,tsv}" + +authors: + - "@SusiJo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index baaee3b8..acf36372 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -310,6 +310,10 @@ csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** +csvtk/split: + - modules/csvtk/split/** + - tests/modules/csvtk/split/** + custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 46232ef9..6ac4472c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -34,7 +34,7 @@ params { contigs_genome_maf_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.maf.gz" contigs_genome_par = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.par" lastdb_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/lastdb.tar.gz" - + baits_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/baits.interval_list" targets_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/targets.interval_list" } @@ -249,11 +249,17 @@ params { } } 'generic' { + 'csv' { + test_csv = "${test_data_dir}/generic/csv/test.csv" + } 'notebooks' { rmarkdown = "${test_data_dir}/generic/notebooks/rmarkdown/rmarkdown_notebook.Rmd" ipython_md = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.md" ipython_ipynb = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.ipynb" } + 'tsv' { + test_tsv = "${test_data_dir}/generic/tsv/test.tsv" + } 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } @@ -285,6 +291,6 @@ params { test_fastq_gz = 
"${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" } - } + } } } diff --git a/tests/modules/csvtk/split/main.nf b/tests/modules/csvtk/split/main.nf new file mode 100644 index 00000000..8dfd4053 --- /dev/null +++ b/tests/modules/csvtk/split/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' addParams( options: [args: "-C '&' --fields 'first_name' "]) + +workflow test_csvtk_split_tsv { + + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['generic']['tsv']['test_tsv'], checkIfExists: true) ] + ] + in_format = "tsv" + out_format = "tsv" + CSVTK_SPLIT ( input, in_format, out_format ) +} + +workflow test_csvtk_split_csv { + + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['generic']['csv']['test_csv'], checkIfExists: true) ] + ] + in_format = "csv" + out_format = "csv" + CSVTK_SPLIT( input, in_format, out_format ) +} diff --git a/tests/modules/csvtk/split/test.yml b/tests/modules/csvtk/split/test.yml new file mode 100644 index 00000000..ade2fe48 --- /dev/null +++ b/tests/modules/csvtk/split/test.yml @@ -0,0 +1,25 @@ +- name: csvtk split test_csvtk_split_tsv + command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_tsv -c tests/config/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.tsv + md5sum: 589a2add7f0b8e998d4959e5d883e7d5 + - path: output/csvtk/test-Rob.tsv + md5sum: 6c5555d689c4e685d35d6e394ad6e1e6 + - path: output/csvtk/test-Robert.tsv + md5sum: 45ae6da8111096746d1736d34220a3ec + +- name: csvtk split test_csvtk_split_csv + command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_csv -c tests/config/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.csv + md5sum: 71a931dae6f15f5ddb0318c7d4afe81e + - path: 
output/csvtk/test-Rob.csv + md5sum: efc4bc507021043a3bf2fb0724c4a216 + - path: output/csvtk/test-Robert.csv + md5sum: 8de2f076e64252c2abed69b9c2a3a386 From 2294ff7826eb8f49b006e5428328638473cdd028 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Mon, 15 Nov 2021 11:32:53 -0700 Subject: [PATCH 052/101] add ncbi-genome-download module (#980) * add ncbi-genome-download module * Update modules/ncbigenomedownload/main.nf Co-authored-by: Gregor Sturm Co-authored-by: Harshil Patel --- modules/ncbigenomedownload/functions.nf | 78 +++++++++++++++++++ modules/ncbigenomedownload/main.nf | 56 ++++++++++++++ modules/ncbigenomedownload/meta.yml | 91 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/ncbigenomedownload/main.nf | 16 ++++ tests/modules/ncbigenomedownload/test.yml | 11 +++ 6 files changed, 256 insertions(+) create mode 100644 modules/ncbigenomedownload/functions.nf create mode 100644 modules/ncbigenomedownload/main.nf create mode 100644 modules/ncbigenomedownload/meta.yml create mode 100644 tests/modules/ncbigenomedownload/main.nf create mode 100644 tests/modules/ncbigenomedownload/test.yml diff --git a/modules/ncbigenomedownload/functions.nf b/modules/ncbigenomedownload/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ncbigenomedownload/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = 
args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..ffa53871 --- /dev/null +++ b/modules/ncbigenomedownload/main.nf @@ -0,0 +1,56 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NCBIGENOMEDOWNLOAD { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::ncbi-genome-download=0.3.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1" + } else { + container "quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1" + } + + input: + val meta + path accessions + + output: + tuple val(meta), path("*_genomic.gbff.gz") , emit: gbk , optional: true + tuple val(meta), path("*_genomic.fna.gz") , emit: fna , optional: true + tuple val(meta), path("*_rm.out.gz") , emit: rm , optional: true + tuple val(meta), path("*_feature_table.txt.gz") , emit: features, optional: true + tuple val(meta), path("*_genomic.gff.gz") , emit: gff , optional: true + tuple val(meta), path("*_protein.faa.gz") , emit: faa , optional: true + tuple val(meta), path("*_protein.gpff.gz") , emit: gpff , optional: true + tuple val(meta), path("*_wgsmaster.gbff.gz") , emit: wgs_gbk , optional: true + tuple val(meta), path("*_cds_from_genomic.fna.gz"), emit: cds , optional: true + tuple val(meta), path("*_rna.fna.gz") , emit: rna , optional: true + tuple val(meta), path("*_rna_from_genomic.fna.gz"), emit: rna_fna , optional: true + tuple val(meta), path("*_assembly_report.txt") , emit: report , optional: true + tuple val(meta), path("*_assembly_stats.txt") , emit: stats , optional: true + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def accessions_opt = accessions ? 
"-A ${accessions}" : "" + """ + ncbi-genome-download \\ + $options.args \\ + $accessions_opt \\ + --output-folder ./ \\ + --flat-output + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( ncbi-genome-download --version ) + END_VERSIONS + """ +} diff --git a/modules/ncbigenomedownload/meta.yml b/modules/ncbigenomedownload/meta.yml new file mode 100644 index 00000000..fd9e0a45 --- /dev/null +++ b/modules/ncbigenomedownload/meta.yml @@ -0,0 +1,91 @@ +name: ncbigenomedownload +description: A tool to quickly download assemblies from NCBI's Assembly database +keywords: + - fasta + - download + - assembly +tools: + - ncbigenomedownload: + description: Download genome files from the NCBI FTP server. + homepage: https://github.com/kblin/ncbi-genome-download + documentation: https://github.com/kblin/ncbi-genome-download + tool_dev_url: https://github.com/kblin/ncbi-genome-download + doi: "" + licence: ['Apache Software License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - accessions: + type: file + description: List of accessions (one per line) to download + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gbk: + type: file + description: GenBank format of the genomic sequence(s) in the assembly + pattern: "*_genomic.gbff.gz" + - fna: + type: file + description: FASTA format of the genomic sequence(s) in the assembly. + pattern: "*_genomic.fna.gz" + - rm: + type: file + description: RepeatMasker output for eukaryotes. 
+ pattern: "*_rm.out.gz" + - features: + type: file + description: Tab-delimited text file reporting locations and attributes for a subset of annotated features + pattern: "*_feature_table.txt.gz" + - gff: + type: file + description: Annotation of the genomic sequence(s) in GFF3 format + pattern: "*_genomic.gff.gz" + - faa: + type: file + description: FASTA format of the accessioned protein products annotated on the genome assembly. + pattern: "*_protein.faa.gz" + - gpff: + type: file + description: GenPept format of the accessioned protein products annotated on the genome assembly. + pattern: "*_protein.gpff.gz" + - wgs_gbk: + type: file + description: GenBank flat file format of the WGS master for the assembly + pattern: "*_wgsmaster.gbff.gz" + - cds: + type: file + description: FASTA format of the nucleotide sequences corresponding to all CDS features annotated on the assembly + pattern: "*_cds_from_genomic.fna.gz" + - rna: + type: file + description: FASTA format of accessioned RNA products annotated on the genome assembly + pattern: "*_rna.fna.gz" + - rna_fna: + type: file + description: FASTA format of the nucleotide sequences corresponding to all RNA features annotated on the assembly + pattern: "*_rna_from_genomic.fna.gz" + - report: + type: file + description: Tab-delimited text file reporting the name, role and sequence accession.version for objects in the assembly + pattern: "*_assembly_report.txt" + - stats: + type: file + description: Tab-delimited text file reporting statistics for the assembly + pattern: "*_assembly_stats.txt" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index acf36372..293e333a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -919,6 +919,10 @@ nanoplot: - modules/nanoplot/** - tests/modules/nanoplot/** +ncbigenomedownload: + - modules/ncbigenomedownload/** + - tests/modules/ncbigenomedownload/** + nextclade: - modules/nextclade/** - 
tests/modules/nextclade/** diff --git a/tests/modules/ncbigenomedownload/main.nf b/tests/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..f729b91d --- /dev/null +++ b/tests/modules/ncbigenomedownload/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' addParams( options: [ args: '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria '] ) + +workflow test_ncbigenomedownload { + + input = [ [ id:'test', single_end:false ] ] + + accessions = [] + + NCBIGENOMEDOWNLOAD ( input, accessions) +} + + diff --git a/tests/modules/ncbigenomedownload/test.yml b/tests/modules/ncbigenomedownload/test.yml new file mode 100644 index 00000000..7d1f7c74 --- /dev/null +++ b/tests/modules/ncbigenomedownload/test.yml @@ -0,0 +1,11 @@ +- name: ncbigenomedownload test_ncbigenomedownload + command: nextflow run tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c tests/config/nextflow.config + tags: + - ncbigenomedownload + files: + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_assembly_stats.txt + md5sum: f78c6a373130e50fac5472962a5fdf44 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.fna.gz + md5sum: b086eb1020e7df022afa545dc6d93297 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.gbff.gz + md5sum: ae2da70e32c783858e6c60c72e9eeb7a From 13b8a16f4a6945af9df146b67972eb70b52e9844 Mon Sep 17 00:00:00 2001 From: tamara-hodgetts <88095902+tamara-hodgetts@users.noreply.github.com> Date: Mon, 15 Nov 2021 19:22:12 +0000 Subject: [PATCH 053/101] Add module get_chrom_sizes (#1063) * hifiasm copied from fastqc * hifiasm tests init from fastqc * meta.yml init; test.yml and main.nf for printing version * Add hifiasm version printing * Removed spaced on an empty line * Reverted hifiasm from main * init getchromsizes * add tests for getchromsizes * Included meta.yml * removed whitespace * 
Moved getchromsizes to custom folder * Update modules/custom/getchromsizes/main.nf Co-authored-by: Harshil Patel Co-authored-by: Sviatoslav Sidorov Co-authored-by: Svyatoslav Sidorov Co-authored-by: Chris Cheshire Co-authored-by: Tamara Hodgetts Co-authored-by: Harshil Patel --- modules/custom/getchromsizes/functions.nf | 78 +++++++++++++++++++++ modules/custom/getchromsizes/main.nf | 39 +++++++++++ modules/custom/getchromsizes/meta.yml | 39 +++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/custom/getchromsizes/main.nf | 12 ++++ tests/modules/custom/getchromsizes/test.yml | 10 +++ 6 files changed, 182 insertions(+) create mode 100644 modules/custom/getchromsizes/functions.nf create mode 100644 modules/custom/getchromsizes/main.nf create mode 100644 modules/custom/getchromsizes/meta.yml create mode 100644 tests/modules/custom/getchromsizes/main.nf create mode 100644 tests/modules/custom/getchromsizes/test.yml diff --git a/modules/custom/getchromsizes/functions.nf b/modules/custom/getchromsizes/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/custom/getchromsizes/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + 
options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf new file mode 100644 index 00000000..fb46986b --- /dev/null +++ b/modules/custom/getchromsizes/main.nf @@ -0,0 +1,39 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CUSTOM_GETCHROMSIZES { + tag "$fasta" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" + } else { + container "quay.io/biocontainers/samtools:1.14--hb421002_0" + } + + input: + path fasta + + output: + path '*.sizes' , emit: sizes + path '*.fai' , emit: fai + path "versions.yml", emit: versions + + script: + """ + samtools faidx $fasta + cut -f 1,2 ${fasta}.fai > ${fasta}.sizes + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/custom/getchromsizes/meta.yml b/modules/custom/getchromsizes/meta.yml new file mode 100644 index 00000000..eb1db4bb --- /dev/null +++ b/modules/custom/getchromsizes/meta.yml @@ -0,0 +1,39 @@ +name: custom_getchromsizes +description: Generates a FASTA file of chromosome sizes and a fasta index file +keywords: + - fasta + - chromosome + - indexing +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files + homepage: http://www.htslib.org/ + documentation: http://www.htslib.org/doc/samtools.html + tool_dev_url: https://github.com/samtools/samtools + doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] + +input: + - fasta: + type: file + description: FASTA file + pattern: "*.{fasta}" + +output: + - sizes: + type: file + description: File containing chromosome lengths + pattern: "*.{sizes}" + - fai: + type: file + description: FASTA index file + pattern: "*.{fai}" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + + +authors: + - "@tamara-hodgetts" + - "@chris-cheshire" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 293e333a..994b6947 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -318,6 
+318,10 @@ custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** +custom/getchromsizes: + - modules/custom/getchromsizes/** + - tests/modules/custom/getchromsizes/** + cutadapt: - modules/cutadapt/** - tests/modules/cutadapt/** diff --git a/tests/modules/custom/getchromsizes/main.nf b/tests/modules/custom/getchromsizes/main.nf new file mode 100644 index 00000000..503668ec --- /dev/null +++ b/tests/modules/custom/getchromsizes/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' addParams( options: [:] ) + +workflow test_custom_getchromsizes { + + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + CUSTOM_GETCHROMSIZES ( input ) +} diff --git a/tests/modules/custom/getchromsizes/test.yml b/tests/modules/custom/getchromsizes/test.yml new file mode 100644 index 00000000..1265f478 --- /dev/null +++ b/tests/modules/custom/getchromsizes/test.yml @@ -0,0 +1,10 @@ +- name: custom getchromsizes + command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c tests/config/nextflow.config + tags: + - custom + - custom/getchromsizes + files: + - path: output/custom/genome.fasta.fai + md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/custom/genome.fasta.sizes + md5sum: a57c401f27ae5133823fb09fb21c8a3c From c2bba7a65d04fff9a908b2b20eaa89e4aff69078 Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Mon, 15 Nov 2021 12:43:53 -0700 Subject: [PATCH 054/101] add clonalframeml module (#974) * add clonalframeml module * Update main.nf * try recommended gzip * Update main.nf Co-authored-by: Chris Cheshire --- modules/clonalframeml/functions.nf | 78 ++++++++++++++++++++++++++++ modules/clonalframeml/main.nf | 47 +++++++++++++++++ modules/clonalframeml/meta.yml | 67 ++++++++++++++++++++++++ tests/config/pytest_modules.yml | 6 ++- tests/modules/clonalframeml/main.nf | 14 +++++ tests/modules/clonalframeml/test.yml | 15 ++++++ 6 files changed, 226 insertions(+), 1 deletion(-) create mode 100644 modules/clonalframeml/functions.nf create mode 100644 modules/clonalframeml/main.nf create mode 100644 modules/clonalframeml/meta.yml create mode 100644 tests/modules/clonalframeml/main.nf create mode 100644 tests/modules/clonalframeml/test.yml diff --git a/modules/clonalframeml/functions.nf b/modules/clonalframeml/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/clonalframeml/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list 
to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf new file mode 100644 index 00000000..f99f944b --- /dev/null +++ b/modules/clonalframeml/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process CLONALFRAMEML { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::clonalframeml=1.12" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1" + } else { + container "quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1" + } + + input: + tuple val(meta), path(newick), path(msa) + + output: + tuple val(meta), path("*.emsim.txt") , emit: emsim, optional: true + tuple val(meta), path("*.em.txt") , emit: em + tuple val(meta), path("*.importation_status.txt") , emit: status + tuple val(meta), path("*.labelled_tree.newick") , emit: newick + tuple val(meta), path("*.ML_sequence.fasta") , emit: fasta + tuple val(meta), path("*.position_cross_reference.txt"), emit: pos_ref + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + ClonalFrameML \\ + $newick \\ + <(gzip -cdf $msa) \\ + $prefix \\ + $options.args + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) + END_VERSIONS + """ +} diff --git a/modules/clonalframeml/meta.yml b/modules/clonalframeml/meta.yml new file mode 100644 index 00000000..874a04be --- /dev/null +++ b/modules/clonalframeml/meta.yml @@ -0,0 +1,67 @@ +name: clonalframeml +description: Predict recomination events in bacterial genomes +keywords: + - fasta + - multiple sequence alignment + - recombination +tools: + - clonalframeml: + description: Efficient inferencing of recombination in bacterial genomes + homepage: https://github.com/xavierdidelot/ClonalFrameML + documentation: https://github.com/xavierdidelot/clonalframeml/wiki + tool_dev_url: https://github.com/xavierdidelot/ClonalFrameML + doi: "10.1371/journal.pcbi.1004041" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - msa: + type: file + description: A multiple seqeunce alignmnet in FASTA format + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - newick: + type: file + description: A Newick formated tree based on multiple sequence alignment + pattern: "*.{newick,treefile,dnd}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - emsim: + type: file + description: Bootstrapped values for the three parameters R/theta, nu and delta + pattern: "*.emsim.txt" + - em: + type: file + description: Point estimates for R/theta, nu, delta and the branch lengths + pattern: "*.em.txt" + - fasta: + type: file + description: Sequence reconstructed by maximum likelihood + pattern: "*.ML_sequence.fasta" + - newick: + type: file + description: Tree with all nodes labelled + pattern: "*.labelled_tree.newick" + - pos_ref: + type: file + description: CSV mapping input sequence files to the sequences in the *.ML_sequence.fasta + pattern: "*.position_cross_reference.txt" + - status: + type: file + description: List of reconstructed recombination events + pattern: "*.importation_status.txt" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 994b6947..7db586b5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -282,6 +282,10 @@ chromap/index: - modules/chromap/index/** - tests/modules/chromap/index/** +clonalframeml: + - modules/clonalframeml/** + - tests/modules/clonalframeml/** + cmseq/polymut: - modules/cmseq/polymut/** - tests/modules/cmseq/polymut/** @@ -1210,7 +1214,7 @@ seacr/callpeak: seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** - + seqsero2: - modules/seqsero2/** - tests/modules/seqsero2/** diff --git a/tests/modules/clonalframeml/main.nf b/tests/modules/clonalframeml/main.nf new file mode 100644 index 00000000..35ecaa79 --- /dev/null +++ b/tests/modules/clonalframeml/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' addParams( options: [:] ) + +workflow test_clonalframeml { + + input = [ [ id:'test' ], // meta map + 
file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.newick", checkIfExists: true), + file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.fa.gz", checkIfExists: true),] + + CLONALFRAMEML ( input ) +} diff --git a/tests/modules/clonalframeml/test.yml b/tests/modules/clonalframeml/test.yml new file mode 100644 index 00000000..f2b68115 --- /dev/null +++ b/tests/modules/clonalframeml/test.yml @@ -0,0 +1,15 @@ +- name: clonalframeml test_clonalframeml + command: nextflow run tests/modules/clonalframeml -entry test_clonalframeml -c tests/config/nextflow.config + tags: + - clonalframeml + files: + - path: output/clonalframeml/test.ML_sequence.fasta + md5sum: 1b75cdaea78f5920ebb92125422a2589 + - path: output/clonalframeml/test.em.txt + md5sum: 5439d59897a9a90390bb175207bf2b9b + - path: output/clonalframeml/test.importation_status.txt + md5sum: 6ce9dbc7746b1c884af042fa02311fba + - path: output/clonalframeml/test.labelled_tree.newick + md5sum: aa47754eea8a3b6bab56bd7c83ba78db + - path: output/clonalframeml/test.position_cross_reference.txt + md5sum: 8ff60768b348fc6f7a1e787aca72f596 From 52c541b080e8a4735a2ef5d78026d73f6f338624 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Mon, 15 Nov 2021 16:43:55 -0500 Subject: [PATCH 055/101] Cooler cload (#634) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * rebuild cooler cload. * update test file path of cload. * add pytest for cload * update to version.yml * update the test data path * Update tests/modules/cooler/cload/main.nf Co-authored-by: Sébastien Guizard * Update modules/cooler/cload/main.nf Co-authored-by: Sébastien Guizard * Update tests/config/test_data.config Co-authored-by: Sébastien Guizard * Update main.nf Remove a lonely curly bracket. * Update test.yml Updated with new workflows. * update the test files * merge the conflicts. 
* update the test.yml * update for change of cooler/dump Co-authored-by: Gregor Sturm Co-authored-by: Sébastien Guizard Co-authored-by: Harshil Patel --- modules/cooler/cload/functions.nf | 78 +++++++++++++++++++++++++++++ modules/cooler/cload/main.nf | 47 +++++++++++++++++ modules/cooler/cload/meta.yml | 52 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 8 ++- tests/modules/cooler/cload/main.nf | 52 +++++++++++++++++++ tests/modules/cooler/cload/test.yml | 29 +++++++++++ 7 files changed, 269 insertions(+), 1 deletion(-) create mode 100644 modules/cooler/cload/functions.nf create mode 100644 modules/cooler/cload/main.nf create mode 100644 modules/cooler/cload/meta.yml create mode 100644 tests/modules/cooler/cload/main.nf create mode 100644 tests/modules/cooler/cload/test.yml diff --git a/modules/cooler/cload/functions.nf b/modules/cooler/cload/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/cooler/cload/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// 
+def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf new file mode 100644 index 00000000..ec0cad56 --- /dev/null +++ b/modules/cooler/cload/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process COOLER_CLOAD { + tag "$meta.id" + label 'process_high' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" + } else { + container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" + } + + input: + tuple val(meta), path(pairs), path(index) + val cool_bin + path chromsizes + + output: + tuple val(meta), val(cool_bin), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def nproc = options.args.contains('pairix') || options.args.contains('tabix')? 
"--nproc ${task.cpus}" : '' + + """ + cooler cload \\ + $options.args \\ + $nproc \\ + ${chromsizes}:${cool_bin} \\ + $pairs \\ + ${prefix}.${cool_bin}.cool + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/cload/meta.yml b/modules/cooler/cload/meta.yml new file mode 100644 index 00000000..8ac75911 --- /dev/null +++ b/modules/cooler/cload/meta.yml @@ -0,0 +1,52 @@ +name: cooler_cload +description: Create a cooler from genomic pairs and bins +keywords: + - cool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - pairs: + type: file + description: Path to contacts (i.e. read pairs) file. + - index: + type: file + description: Path to index file of the contacts. + - cool_bin: + type: value + description: Bins size in bp + - chromsizes: + type: file + description: Path to a chromsizes file. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Output COOL file path + pattern: "*.cool" + - cool_bin: + type: value + description: Bins size in bp + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7db586b5..9d6be566 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -298,6 +298,10 @@ cooler/digest: - modules/cooler/digest/** - tests/modules/cooler/digest/** +cooler/cload: + - modules/cooler/cload/** + - tests/modules/cooler/cload/** + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 6ac4472c..3351204d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -263,7 +263,13 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } - 'cooler' { + 'cooler'{ + test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" + test_pairix_pair_gz_px2 = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz.px2" + test_pairs_pair = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.sample1.pairs" + test_tabix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz" + test_tabix_pair_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz.tbi" + hg19_chrom_sizes = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.chrom.sizes" test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" diff --git 
a/tests/modules/cooler/cload/main.nf b/tests/modules/cooler/cload/main.nf new file mode 100644 index 00000000..dd9b3e98 --- /dev/null +++ b/tests/modules/cooler/cload/main.nf @@ -0,0 +1,52 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairix'] ) +include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N'] ) +include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'tabix'] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) + +workflow test_cooler_cload_pairix { + + input = [ [ id:'test_pairix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_pairix_pair_gz_px2'], checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD ( input, bin_size, sizes ) + COOLER_DUMP(COOLER_CLOAD.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_pairs { + + input = [ [ id:'test_pairs', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairs_pair'], checkIfExists: true), + []] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_PAIRS ( input, bin_size, sizes ) + COOLER_DUMP_PAIRS(COOLER_CLOAD_PAIRS.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_tabix { 
+ + input = [ [ id:'test_tabix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_tabix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_tabix_pair_gz_tbi'], checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_TABIX ( input, bin_size, sizes ) + COOLER_DUMP_TABIX(COOLER_CLOAD_TABIX.out.cool.map{[it[0], it[2]]}, []) + +} diff --git a/tests/modules/cooler/cload/test.yml b/tests/modules/cooler/cload/test.yml new file mode 100644 index 00000000..7cb9a0bd --- /dev/null +++ b/tests/modules/cooler/cload/test.yml @@ -0,0 +1,29 @@ +- name: cooler cload test_cooler_cload_pairix + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairix -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairix.2000000.cool + - path: output/cooler/test_pairix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 + +- name: cooler cload test_cooler_cload_pairs + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairs -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairs.2000000.cool + - path: output/cooler/test_pairs.bedpe + md5sum: 7f832733fc7853ebb1937b33e4c1e0de + +- name: cooler cload test_cooler_cload_tabix + command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_tabix -c tests/config/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_tabix.2000000.cool + - path: output/cooler/test_tabix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 From d5f69856072cf366bc1e023c9f89bc6e738e4904 Mon Sep 17 00:00:00 2001 From: Sateesh <33637490+sateeshperi@users.noreply.github.com> Date: Mon, 15 Nov 2021 17:05:34 -0500 Subject: [PATCH 056/101] add new nucmer module (#945) * add new nucmer module * Apply suggestions from code 
review Co-authored-by: Robert A. Petit III * update tests with file produced by input * Update main.nf * Update meta.yml Co-authored-by: Michael Cipriano Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel --- modules/nucmer/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/nucmer/main.nf | 55 +++++++++++++++++++++++ modules/nucmer/meta.yml | 50 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/nucmer/main.nf | 14 ++++++ tests/modules/nucmer/test.yml | 9 ++++ 6 files changed, 210 insertions(+) create mode 100644 modules/nucmer/functions.nf create mode 100644 modules/nucmer/main.nf create mode 100644 modules/nucmer/meta.yml create mode 100644 tests/modules/nucmer/main.nf create mode 100644 tests/modules/nucmer/test.yml diff --git a/modules/nucmer/functions.nf b/modules/nucmer/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/nucmer/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item 
-> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf new file mode 100644 index 00000000..49a275f4 --- /dev/null +++ b/modules/nucmer/main.nf @@ -0,0 +1,55 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process NUCMER { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) 
} + + conda (params.enable_conda ? "bioconda::mummer=3.23" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" + } else { + container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" + } + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.delta") , emit: delta + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def is_compressed_query = query.getName().endsWith(".gz") ? true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + + nucmer \\ + -p $prefix \\ + --coords \\ + $options.args \\ + $fasta_name_ref \\ + $fasta_name_query + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) + END_VERSIONS + """ +} diff --git a/modules/nucmer/meta.yml b/modules/nucmer/meta.yml new file mode 100644 index 00000000..cccf723f --- /dev/null +++ b/modules/nucmer/meta.yml @@ -0,0 +1,50 @@ +name: nucmer +description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. +keywords: + - align + - nucleotide +tools: + - nucmer: + description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. 
+ homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: "https://doi.org/10.1186/gb-2004-5-2-r12" + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - delta: + type: file + description: File containing coordinates of matches between reference and query + - coords: + type: file + description: NUCmer1.1 coords output file + pattern: "*.{coords}" + +authors: + - "@sateeshperi" + - "@mjcipriano" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9d6be566..6c32a0ff 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -943,6 +943,10 @@ ngmaster: - modules/ngmaster/** - tests/modules/ngmaster/** +nucmer: + - modules/nucmer/** + - tests/modules/nucmer/** + optitype: - modules/optitype/** - tests/modules/optitype/** diff --git a/tests/modules/nucmer/main.nf b/tests/modules/nucmer/main.nf new file mode 100644 index 00000000..8021f577 --- /dev/null +++ b/tests/modules/nucmer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NUCMER } from '../../../modules/nucmer/main.nf' addParams( options: [:] ) + +workflow test_nucmer { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + 
file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + NUCMER ( input ) +} diff --git a/tests/modules/nucmer/test.yml b/tests/modules/nucmer/test.yml new file mode 100644 index 00000000..86b3df5d --- /dev/null +++ b/tests/modules/nucmer/test.yml @@ -0,0 +1,9 @@ +- name: nucmer test_nucmer + command: nextflow run tests/modules/nucmer -entry test_nucmer -c tests/config/nextflow.config + tags: + - nucmer + files: + - path: output/nucmer/test.coords + contains: ['MT192765.1'] + - path: output/nucmer/test.delta + contains: ['MT192765.1'] From bc8899f1bf298185a444511d73cdb9dd21ac10f4 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Tue, 16 Nov 2021 02:23:07 -0700 Subject: [PATCH 057/101] [fix] hicap module allow optional outputs (#937) * make hicap outputs optional * update test data * typo * Update main.nf * use mkfifo * Update main.nf Co-authored-by: Gisela Gabernet Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/hicap/main.nf | 7 +++---- tests/modules/hicap/main.nf | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index e2e70678..fbc157b1 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -24,9 +24,9 @@ process HICAP { path model_fp output: - tuple val(meta), path("*.gbk"), emit: gbk - tuple val(meta), path("*.svg"), emit: svg - tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.gbk"), emit: gbk, optional: true + tuple val(meta), path("*.svg"), emit: svg, optional: true + tuple val(meta), path("*.tsv"), emit: tsv, optional: true path "versions.yml" , emit: versions script: @@ -39,7 +39,6 @@ process HICAP { if [ "$is_compressed" == "true" ]; then gzip -c -d $fasta > $fasta_name fi - hicap \\ --query_fp $fasta_name \\ $database_args \\ diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf index 77c309a5..3ac9c20b 100644 --- a/tests/modules/hicap/main.nf +++ 
b/tests/modules/hicap/main.nf @@ -7,7 +7,7 @@ include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] workflow test_hicap { input = [ [ id:'test', single_end:false ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species-specific/haemophilus-influenzae/GCF_900478275.fna.gz", checkIfExists: true) ] + file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/GCF_900478275.fna.gz", checkIfExists: true) ] database_dir = [] model_fp = [] From b50f4e3d59810447c10520c8dc9026c96d7c6470 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Tue, 16 Nov 2021 05:37:27 -0600 Subject: [PATCH 058/101] Update dsh-bio to 2.0.6. (#1075) Co-authored-by: Harshil Patel --- modules/dshbio/exportsegments/main.nf | 6 +++--- modules/dshbio/filterbed/main.nf | 6 +++--- modules/dshbio/filtergff3/main.nf | 6 +++--- modules/dshbio/splitbed/main.nf | 6 +++--- modules/dshbio/splitgff3/main.nf | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 84f59e89..ec471000 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -11,11 +11,11 @@ process DSHBIO_EXPORTSEGMENTS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 35039f21..9ad8ce8b 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bf677da8..bf729dbf 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_FILTERGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 3e8d656c..20e679f4 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITBED { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index dd477181..e0312a19 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -11,11 +11,11 @@ process DSHBIO_SPLITGFF3 { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) + conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" + container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" + container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" } input: From 691feeafdcd8b7874f540695ece122cce01a524f Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Tue, 16 Nov 2021 15:09:30 +0100 Subject: [PATCH 059/101] GATK4 SplitNCigarReads: fasta_fai_dict tuple is now split into separate input channels (#1076) * fasta_fai_dict tuple is now split into separate input channels * fix: lint errors * fix: pytest errors * Update modules/gatk4/splitncigarreads/meta.yml * Update modules/gatk4/splitncigarreads/main.nf Co-authored-by: Maxime U. Garcia --- modules/gatk4/splitncigarreads/main.nf | 4 +++- modules/gatk4/splitncigarreads/meta.yml | 15 +++++++++++---- tests/modules/gatk4/splitncigarreads/main.nf | 9 ++++----- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 01b1d05a..26fb799d 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -20,7 +20,9 @@ process GATK4_SPLITNCIGARREADS { input: tuple val(meta), path(bam) - tuple path(fasta), path(fai), path(dict) + path fasta + path fai + path dict output: tuple val(meta), path('*.bam'), emit: bam diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index f287ede4..fd6edda0 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -24,10 +24,17 @@ input: description: BAM/SAM/CRAM file containing reads pattern: "*.{bam,sam,cram}" - fasta: - type: tuple of files - description: | - 
Tuple of fasta file (first), sequence dict (second) and fasta index (third) - pattern: ["*.fasta", "*.dict", "*.fai"] + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" output: - bam: type: file diff --git a/tests/modules/gatk4/splitncigarreads/main.nf b/tests/modules/gatk4/splitncigarreads/main.nf index 3e6bde80..0934593f 100644 --- a/tests/modules/gatk4/splitncigarreads/main.nf +++ b/tests/modules/gatk4/splitncigarreads/main.nf @@ -8,10 +8,9 @@ workflow test_gatk4_splitncigarreads { input = [ [ id:'test' ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] ] - fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - GATK4_SPLITNCIGARREADS ( input, fasta ) + GATK4_SPLITNCIGARREADS ( input, fasta, fai, dict ) } From 2d4549122be989b36fe5299b292f4032601c865d Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Tue, 16 Nov 2021 08:49:57 -0600 Subject: [PATCH 060/101] feat: Add main.nf (#1077) Allows for launching of subworkflows --- main.nf | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 main.nf diff --git a/main.nf b/main.nf new file mode 100644 index 00000000..de12f619 --- /dev/null +++ b/main.nf @@ -0,0 +1,3 @@ +/* + * not actually used - just a placeholder + */ From 071b1d50a8272037f0c9c8485c18d3953ac703d6 Mon Sep 17 
00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Wed, 17 Nov 2021 10:07:17 +0000 Subject: [PATCH 061/101] Add gatk somatic tumour calling subworkflow (#1064) * initial commit to set up new branch * save changes to checkout * workflow working, still needs test.yml and meta.yml, also fix versions file * subworkflow finished * Update pytest_subworkflows.yml * Update pytest_subworkflows.yml * Update pytest_subworkflows.yml * fix config subworkflow name * Update main.nf * Update pytest_subworkflows.yml * fixed md5sum issue likely caused by gatk version update * tumour changed to tumor * old dir deleted * Comments added to explain use of placeholders '[]' * updated index names, input channel renamed to input * Apply suggestions from code review * updated to perform new subworkflow testing Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia --- .../main.nf | 88 ++++++++++++++ .../meta.yml | 108 ++++++++++++++++++ .../nextflow.config | 4 + tests/config/pytest_modules.yml | 16 ++- .../main.nf | 24 ++++ .../test.yml | 28 +++++ 6 files changed, 264 insertions(+), 4 deletions(-) create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml create mode 100644 subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf new file mode 100644 index 00000000..20d8a176 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf @@ -0,0 +1,88 @@ +// +// Run GATK mutect2 in tumor only mode, getepileupsummaries, calculatecontamination and 
filtermutectcalls +// + +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + mutect2_input = channel.from(input) + + // + //Perform variant calling using mutect2 module in tumor single mode. + // + MUTECT2 ( mutect2_input , true , false , false , [] , fasta , fai , dict , germline_resource , germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate pileup summary table using getepileupsummaries. 
+ // + pileup_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + GETPILEUPSUMMARIES ( pileup_input , germline_resource , germline_resource_tbi , interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup = GETPILEUPSUMMARIES.out.table.collect() + //[] is a placeholder for the optional input where the matched normal sample would be passed in for tumor-normal samples, which is not necessary for this workflow. + ch_pileup.add([]) + CALCULATECONTAMINATION ( ch_pileup, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + //[] is added as a placeholder for the optional input file artifact priors, which is only used for tumor-normal samples and therefor isn't needed in this workflow. + ch_stats.add([]) + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is added as a placeholder for entering a contamination estimate value, which is not needed as this workflow uses the contamination table instead. 
+ ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + pileup_table = GETPILEUPSUMMARIES.out.table.collect() // channel: [ val(meta), [ table ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_index = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..14329691 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml @@ -0,0 +1,108 @@ +name: gatk_tumor_only_somatic_variant_calling +description: | + Perform variant calling on a single tumor sample using mutect2 tumor only mode. + Run the input bam file through getpileupsummarries and then calculatecontaminationto get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls and the contamination & segmentation tables for additional filtering. 
+keywords: + - gatk4 + - mutect2 + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_only + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing one BAM file, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing one BAM file indexe, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. + pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling. 
+ pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - pileup_table: + type: file + description: File containing the pileup summary table. + pattern: "*.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. + pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..af50c2b0 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config @@ -0,0 +1,4 @@ +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6c32a0ff..29d07639 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,7 +474,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: +gatk4/calculatecontamination: &gatk4/calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -494,7 +494,7 
@@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: +gatk4/filtermutectcalls: &gatk4/filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** @@ -506,7 +506,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: +gatk4/getpileupsummaries: &gatk4/getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -538,7 +538,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: +gatk4/mutect2: &gatk4/mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -1426,3 +1426,11 @@ subworkflows/gatk_create_som_pon: - tests/subworkflows/nf-core/gatk_create_som_pon/** - *gatk4/genomicsdbimport - *gatk4/createsomaticpanelofnormals + +subworkflows/gatk_tumor_only_somatic_variant_calling: + - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** + - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** + - *gatk4/mutect2 + - *gatk4/getpileupsummaries + - *gatk4/calculatecontamination + - *gatk4/filtermutectcalls diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf new file mode 100644 index 00000000..988dc31e --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING } from '../../../../subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main' addParams( [:] ) + +workflow test_gatk_tumor_only_somatic_variant_calling { + input = [ + [[ id:'test' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + 
[file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi, interval_file ) +} diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml new file mode 100644 index 00000000..797ae936 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml @@ -0,0 +1,28 @@ +- name: gatk_tumor_only_somatic_variant_calling + command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling -entry test_gatk_tumor_only_somatic_variant_calling -c tests/config/nextflow.config + tags: + - subworkflows/gatk_tumor_only_somatic_variant_calling + # Modules + # - gatk4/mutect2 + # - gatk4/getpileupsummaries + # - gatk4/calculatecontamination + # - gatk4/filtermutectcalls + files: + # gatk4 mutect2 + - path: 
./output/mutect2/test.vcf.gz + - path: ./output/mutect2/test.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 + - path: ./output/mutect2/test.vcf.gz.tbi + # gatk4 getpileupsummaries + - path: ./output/getpileupsummaries/test.pileups.table + md5sum: 8b1b4c8ab831eca50ee9e940463a741f + # gatk4 calculatecontamination + - path: ./output/calculatecontamination/test.contamination.table + md5sum: 5fdcf1728cf98985ce31c038eb24e05c + - path: ./output/calculatecontamination/test.segmentation.table + md5sum: 91f28bfe4727a3256810927fc5eba92f + # gatk4 filtermutectcalls + - path: ./output/filtermutectcalls/test_filtered.vcf.gz + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.filteringStats.tsv + md5sum: 8731945490960546719ce4a71a151e4f + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.tbi From 5b975cc20da2ebbc197be2203926f59d6a668153 Mon Sep 17 00:00:00 2001 From: GCJMackenzie <43276267+GCJMackenzie@users.noreply.github.com> Date: Wed, 17 Nov 2021 10:34:07 +0000 Subject: [PATCH 062/101] Add gatk somatic paired calling subworkflow (#1067) * initial commit to setup branch * workflow finished * Update nextflow.config * tumour to tumor, getpileup passed as nomral and tumor * paired_somatic renamed to tumor_normal_somatic * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Update subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf Co-authored-by: Maxime U. Garcia * updated index names in meta.yml * changed index file names in main script and test * Apply suggestions from code review Co-authored-by: Maxime U. Garcia * Apply suggestions from code review * fixed bug from changes * Apply suggestions from code review * tests should now work after the yml update * Update pytest_modules.yml Co-authored-by: GCJMackenzie Co-authored-by: Maxime U. Garcia Co-authored-by: Maxime U. 
Garcia --- .../main.nf | 109 +++++++++++++++ .../meta.yml | 127 ++++++++++++++++++ .../nextflow.config | 6 + tests/config/pytest_modules.yml | 11 +- .../main.nf | 25 ++++ .../test.yml | 34 +++++ 6 files changed, 311 insertions(+), 1 deletion(-) create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml create mode 100644 subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config create mode 100644 tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf create mode 100644 tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf new file mode 100644 index 00000000..25c63687 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf @@ -0,0 +1,109 @@ +// +// Run GATK mutect2 in tumor normal mode, getepileupsummaries, calculatecontamination, learnreadorientationmodel and filtermutectcalls +// + +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [suffix: '_tumor'] +params.getpileup_normal_options = [suffix: '_normal'] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_LEARNREADORIENTATIONMODEL as LEARNREADORIENTATIONMODEL } from '../../../modules/gatk4/learnreadorientationmodel/main' addParams( options: params.learnorientation_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_TUMOR } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_tumor_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_NORMAL} from 
'../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_normal_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [which_norm] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + + // + //Perform variant calling using mutect2 module in tumor single mode. + // + mutect2_input = channel.from(input) + MUTECT2 ( mutect2_input, false, false, false, [], fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate artifactpriors using learnreadorientationmodel on the f1r2 output of mutect2. + // + ch_learnread_in = MUTECT2.out.f1r2.collect() + LEARNREADORIENTATIONMODEL (ch_learnread_in) + ch_versions = ch_versions.mix(LEARNREADORIENTATIONMODEL.out.versions) + + // + //Generate pileup summary tables using getepileupsummaries. tumor sample should always be passed in as the first input and input list entries of ch_mutect2_in, + //to ensure correct file order for calculatecontamination. 
+ // + pileup_tumor_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + + pileup_normal_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[1], input_index[1]] + } + GETPILEUPSUMMARIES_TUMOR ( pileup_tumor_input, germline_resource, germline_resource_tbi, interval_file ) + GETPILEUPSUMMARIES_NORMAL ( pileup_normal_input, germline_resource, germline_resource_tbi, interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES_NORMAL.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() + ch_pileup_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() + ch_calccon_in = ch_pileup_tumor.combine(ch_pileup_normal, by: 0) + CALCULATECONTAMINATION ( ch_calccon_in, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the artifactpriors, contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + ch_orientation = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is used as a placeholder for optional input to specify the contamination estimate as a value, since the contamination table is used, this is not needed. 
+ ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_orientation, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_tbi = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + mutect2_f1r2 = MUTECT2.out.f1r2.collect() // channel: [ val(meta), [ f1r2 ] ] + + artifact_priors = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() // channel: [ val(meta), [ artifactprior ] ] + + pileup_table_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() // channel: [ val(meta), [ table_tumor ] ] + pileup_table_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() // channel: [ val(meta), [ table_normal ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_tbi = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..4c42addf --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml @@ -0,0 +1,127 @@ +name: gatk_tumor_normal_somatic_variant_calling +description: | + Perform variant calling on a paired tumor normal 
set of samples using mutect2 tumor normal mode. + f1r2 output of mutect2 is run through learnreadorientationmodel to get the artifact priors. + Run the input bam files through getpileupsummarries and then calculatecontamination to get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls, artifact priors and the contamination & segmentation tables for additional filtering. +keywords: + - gatk4 + - mutect2 + - learnreadorientationmodel + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_only + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/learnreadorientationmodel + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing the tumor and normal BAM files, in that order, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing the tumor and normal BAM file indexes, in that order, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - which_norm: + type: list + description: optional list of sample headers contained in the normal sample input file. + pattern: "testN" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. 
+ pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling. + pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - mutect2_f1r2: + type: file + description: file containing information to be passed to LearnReadOrientationModel. + pattern: "*.f1r2.tar.gz" + - artifact_priors: + type: file + description: file containing artifact-priors to be used by filtermutectcalls. + pattern: "*.tar.gz" + - pileup_table_tumor: + type: file + description: File containing the tumor pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_tumor.pileups.table" + - pileup_table_normal: + type: file + description: File containing the normal pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_normal.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. 
+ pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..bb8d1bc4 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config @@ -0,0 +1,6 @@ +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [:] +params.getpileup_normal_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 29d07639..9ed9f55c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -522,7 +522,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: +gatk4/learnreadorientationmodel: &gatk4/learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -1427,6 +1427,15 @@ subworkflows/gatk_create_som_pon: - *gatk4/genomicsdbimport - *gatk4/createsomaticpanelofnormals +subworkflows/gatk_tumor_normal_somatic_variant_calling: + - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** + - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** + - *gatk4/mutect2 + - *gatk4/learnreadorientationmodel + - *gatk4/getpileupsummaries + - *gatk4/calculatecontamination + - *gatk4/filtermutectcalls + subworkflows/gatk_tumor_only_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf new file mode 100644 index 
00000000..21e35998 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING } from '../../../../subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main' addParams( [:] ) + +workflow test_gatk_tumor_normal_somatic_variant_calling { + input = [ + [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + ["testN"] + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_vcf_gz_tbi'], checkIfExists: true) + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, 
panel_of_normals_tbi, interval_file ) +} diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml new file mode 100644 index 00000000..3c6753fb --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml @@ -0,0 +1,34 @@ +- name: gatk_tumor_normal_somatic_variant_calling + command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling -entry test_gatk_tumor_normal_somatic_variant_calling -c tests/config/nextflow.config + tags: + - subworkflows/gatk_tumor_normal_somatic_variant_calling + # Modules + # - gatk4/mutect2 + # - gatk4/learnreadorientationmodel + # - gatk4/getpileupsummaries + # - gatk4/calculatecontamination + # - gatk4/filtermutectcalls + files: + # gatk4 mutect2 + - path: ./output/mutect2/test.vcf.gz + - path: ./output/mutect2/test.vcf.gz.stats + md5sum: 6ecb874e6a95aa48233587b876c2a7a9 + - path: ./output/mutect2/test.vcf.gz.tbi + - path: ./output/mutect2/test.f1r2.tar.gz + # gatk4 learnreadorientationmodel + - path: ./output/learnreadorientationmodel/test.tar.gz + # gatk4 getpileupsummaries + - path: ./output/getpileupsummaries/test_tumor.pileups.table + md5sum: 8b1b4c8ab831eca50ee9e940463a741f + - path: ./output/getpileupsummaries/test_normal.pileups.table + md5sum: 0d19674bef2ff0700d5b02b3463dd210 + # gatk4 calculatecontamination + - path: ./output/calculatecontamination/test.contamination.table + md5sum: 5fdcf1728cf98985ce31c038eb24e05c + - path: ./output/calculatecontamination/test.segmentation.table + md5sum: 91f28bfe4727a3256810927fc5eba92f + # gatk4 filtermutectcalls + - path: ./output/filtermutectcalls/test_filtered.vcf.gz + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.filteringStats.tsv + md5sum: 98e1b87a52999eb8f429ef4a7877eb3f + - path: ./output/filtermutectcalls/test_filtered.vcf.gz.tbi From f052dc445c8f1f6791ae0a15530300a94b9f7d2c Mon Sep 17 00:00:00 
2001 From: "Robert A. Petit III" Date: Thu, 18 Nov 2021 11:09:09 -0700 Subject: [PATCH 063/101] use underscores in anchors and references (#1080) * use underscores in anchors and references * Dummy change to trigger CI * use dev branch * underscore anchor --- .github/workflows/nf-core-linting.yml | 2 +- .../nf-core/gatk_create_som_pon/main.nf | 1 - tests/config/pytest_modules.yml | 60 +++++++++---------- 3 files changed, 31 insertions(+), 32 deletions(-) diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index ce441413..55b8c296 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -60,7 +60,7 @@ jobs: # FIXME: Remove this when nf-core modules lint stabilizes and install stable release - name: Install nf-core tools development version - run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@subworkflow_hacks + run: python -m pip install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev - name: Install Nextflow env: diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf index 40269a4a..89a9566e 100644 --- a/subworkflows/nf-core/gatk_create_som_pon/main.nf +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -1,7 +1,6 @@ // // Run GATK mutect2, genomicsdbimport and createsomaticpanelofnormals // - params.mutect2_options = [args: '--max-mnp-distance 0'] params.gendbimport_options = [:] params.createsompon_options = [:] diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9ed9f55c..daa48bc2 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -474,7 +474,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: &gatk4/calculatecontamination +gatk4/calculatecontamination: &gatk4_calculatecontamination - 
modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -482,7 +482,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: &gatk4/createsomaticpanelofnormals +gatk4/createsomaticpanelofnormals: &gatk4_createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -494,11 +494,11 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: &gatk4/filtermutectcalls +gatk4/filtermutectcalls: &gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: &gatk4/genomicsdbimport +gatk4/genomicsdbimport: &gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -506,7 +506,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: &gatk4/getpileupsummaries +gatk4/getpileupsummaries: &gatk4_getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -522,7 +522,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: &gatk4/learnreadorientationmodel +gatk4/learnreadorientationmodel: &gatk4_learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -538,7 +538,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: &gatk4/mutect2 +gatk4/mutect2: &gatk4_mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -1187,7 +1187,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: &samtools/index +samtools/index: &samtools_index - 
modules/samtools/index/** - tests/modules/samtools/index/** @@ -1199,7 +1199,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: &samtools/sort +samtools/sort: &samtools_sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1275,11 +1275,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: &sratools/fasterqdump +sratools/fasterqdump: &sratools_fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: &sratools/prefetch +sratools/prefetch: &sratools_prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1399,47 +1399,47 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** -subworkflows/bam_stats_samtools: &subworkflows/bam_stats_samtools +subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools - subworkflows/nf-core/bam_stats_samtools/** - tests/subworkflows/nf-core/bam_stats_samtools/** -subworkflows/bam_sort_samtools: &subworkflows/bam_sort_samtools +subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools - subworkflows/nf-core/bam_sort_samtools/** - tests/subworkflows/nf-core/bam_sort_samtools/** - - *samtools/sort - - *samtools/index - - *subworkflows/bam_stats_samtools + - *samtools_sort + - *samtools_index + - *subworkflows_bam_stats_samtools subworkflows/align_bowtie2: - subworkflows/nf-core/align_bowtie2/** - tests/subworkflows/nf-core/align_bowtie2/** - - *subworkflows/bam_sort_samtools + - *subworkflows_bam_sort_samtools subworkflows/sra_fastq: - subworkflows/nf-core/sra_fastq/** - tests/subworkflows/nf-core/sra_fastq/** - - *sratools/fasterqdump - - *sratools/prefetch + - *sratools_fasterqdump + - *sratools_prefetch subworkflows/gatk_create_som_pon: - subworkflows/nf-core/gatk_create_som_pon/** - tests/subworkflows/nf-core/gatk_create_som_pon/** - - *gatk4/genomicsdbimport - - *gatk4/createsomaticpanelofnormals + - *gatk4_genomicsdbimport + - 
*gatk4_createsomaticpanelofnormals subworkflows/gatk_tumor_normal_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - *gatk4/mutect2 - - *gatk4/learnreadorientationmodel - - *gatk4/getpileupsummaries - - *gatk4/calculatecontamination - - *gatk4/filtermutectcalls + - *gatk4_mutect2 + - *gatk4_learnreadorientationmodel + - *gatk4_getpileupsummaries + - *gatk4_calculatecontamination + - *gatk4_filtermutectcalls subworkflows/gatk_tumor_only_somatic_variant_calling: - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - *gatk4/mutect2 - - *gatk4/getpileupsummaries - - *gatk4/calculatecontamination - - *gatk4/filtermutectcalls + - *gatk4_mutect2 + - *gatk4_getpileupsummaries + - *gatk4_calculatecontamination + - *gatk4_filtermutectcalls From 43a1c1c6d18d7d9395846503046f1649ba72a06e Mon Sep 17 00:00:00 2001 From: praveenraj2018 <43108054+praveenraj2018@users.noreply.github.com> Date: Thu, 18 Nov 2021 21:47:40 +0100 Subject: [PATCH 064/101] Update in GATK4 variantfiltration: Added vcf_index to vcf tuple; output to vcf.gz format. (#1083) * Added vcf_index to vcf tuple; output to vcf.gz format. * Fix: extra new line in meta.yml. * addressed review feedback * fix: editorconfig error * fix: gatk memory flag * fix: editorconfig error * fix: Indentation fix: Indentation * Fix: lint editorconfig error Removed one extra space Co-authored-by: Robert A. 
Petit III --- modules/gatk4/variantfiltration/main.nf | 20 ++++++++---- modules/gatk4/variantfiltration/meta.yml | 16 +++++++--- tests/modules/gatk4/variantfiltration/main.nf | 32 +++++++++++++++---- .../modules/gatk4/variantfiltration/test.yml | 20 +++++++++--- 4 files changed, 65 insertions(+), 23 deletions(-) diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index a4e950ae..e0f0727a 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -19,23 +19,29 @@ process GATK4_VARIANTFILTRATION { } input: - tuple val(meta), path(vcf) + tuple val(meta), path(vcf), path(vcf_tbi) path fasta path fai path dict output: - tuple val(meta), path("*.vcf"), emit: vcf - path "versions.yml" , emit: versions - + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.toGiga() + } """ - gatk VariantFiltration \\ + gatk --java-options "-Xmx${avail_mem}G" VariantFiltration \\ -R $fasta \\ -V $vcf \\ - -O ${prefix}.vcf \\ + -O ${prefix}.vcf.gz \\ $options.args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index 6d4983a6..71f0b8b2 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -21,8 +21,12 @@ input: e.g. 
[ id:'test'] - vcf: type: list - description: Input VCF file - pattern: "*.{vcf}" + description: List of VCF(.gz) files + pattern: "*.{vcf,vcf.gz}" + - vcf_tbi: + type: list + description: List of VCF file indexes + pattern: "*.{idx,tbi}" - fasta: type: file description: Fasta file of reference genome @@ -38,8 +42,12 @@ input: output: - vcf: type: file - description: filtered VCF file - pattern: "*.filtered.{vcf}" + description: Compressed VCF file + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of VCF file + pattern: "*.vcf.gz.tbi" - versions: type: file description: File containing software versions diff --git a/tests/modules/gatk4/variantfiltration/main.nf b/tests/modules/gatk4/variantfiltration/main.nf index 04bebf6f..67c9daec 100644 --- a/tests/modules/gatk4/variantfiltration/main.nf +++ b/tests/modules/gatk4/variantfiltration/main.nf @@ -5,14 +5,32 @@ nextflow.enable.dsl = 2 test_options = ['args': '--filter-name "test_filter" --filter-expression "MQ0 > 0"', 'suffix': '.filtered'] include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' addParams( options: test_options ) -workflow test_gatk4_variantfiltration { +// Basic parameters with uncompressed VCF input +workflow test_gatk4_variantfiltration_vcf_input { + input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] - fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - fai = [ file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) ] - genome_dict = [ file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: 
true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fai, genome_dict ) + GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) } + +// Basic parameters with compressed VCF input +workflow test_gatk4_variantfiltration_gz_input { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) +} + + diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index 1a2bf6d2..e3177cfc 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -1,9 +1,19 @@ -- name: gatk4 variantfiltration test_gatk4_variantfiltration - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration -c tests/config/nextflow.config +- name: gatk4 variantfiltration test_gatk4_variantfiltration_vcf_input + command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c tests/config/nextflow.config tags: - gatk4/variantfiltration - gatk4 files: - - path: output/gatk4/test.filtered.vcf - contains: - - "AC=2;AN=2;DP=1;DP4=0,0,1,0;MQ=60;MQ0F=0;SGB=-0.379885" + - path: output/gatk4/test.filtered.vcf.gz + contains: 
['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi + +- name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input + command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c tests/config/nextflow.config + tags: + - gatk4/variantfiltration + - gatk4 + files: + - path: output/gatk4/test.filtered.vcf.gz + contains: ['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi From 4e5b6ed843ee0691848aba58088e4347cd3aae98 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 18 Nov 2021 21:54:14 +0100 Subject: [PATCH 065/101] Fix picard markduplicates (#1084) * Fix picard/markduplicates with new options syntax * Delete md5sum for bam files and add contains for metrics.txt Co-authored-by: Robert A. 
Petit III Co-authored-by: FriederikeHanssen --- modules/picard/markduplicates/main.nf | 6 +++--- tests/modules/picard/markduplicates/main.nf | 2 +- tests/modules/picard/markduplicates/test.yml | 6 ++++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 37b825d7..130a1e52 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -40,9 +40,9 @@ process PICARD_MARKDUPLICATES { -Xmx${avail_mem}g \\ MarkDuplicates \\ $options.args \\ - -I $bam \\ - -O ${prefix}.bam \\ - -M ${prefix}.MarkDuplicates.metrics.txt + I=$bam \\ + O=${prefix}.bam \\ + M=${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 78643f8b..7c9c63cd 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) -include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : '--ASSUME_SORT_ORDER queryname' ] ) +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : 'ASSUME_SORT_ORDER=queryname' ] ) workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 04075548..4c314814 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -5,8 +5,9 @@ - picard/markduplicates files: - path: 
./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: b520ccdc3a9edf3c6a314983752881f2 - name: picard markduplicates unsorted bam command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config tags: @@ -14,6 +15,7 @@ - picard/markduplicates files: - path: ./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: 46a6fc76048ba801d328f869ac9db020 From 19035c99d1cd8412b5046c6bb4af787658b84e5b Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Thu, 18 Nov 2021 22:24:47 +0100 Subject: [PATCH 066/101] Add thread option to samtools modules (#1069) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add thread option to samtools modules * fix tests * fix tests * fix tests * Fix naming to fix tests * cpus-1 to account for mainthread * remove thread in ampliconclip, docu doesn't report this param * add -1 to all other applicable samtools modules * Update samtools version * Update checksums * retrigger GHA after update * Update modules/samtools/fastq/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/fixmate/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/flagstat/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/index/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/merge/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/stats/main.nf Co-authored-by: Patrick Hüther * Update modules/samtools/view/main.nf Co-authored-by: Patrick Hüther * Fix md5sum fixmate * Fix md5sums * sth funny with the fixmate checksums * more md5sums updates Co-authored-by: Patrick Hüther --- modules/samtools/ampliconclip/main.nf | 7 +++---- modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/fastq/main.nf | 10 +++++----- modules/samtools/fixmate/main.nf | 2 +- 
modules/samtools/flagstat/main.nf | 8 ++++---- modules/samtools/idxstats/main.nf | 6 +++--- modules/samtools/index/main.nf | 9 +++++---- modules/samtools/merge/main.nf | 9 +++++---- modules/samtools/mpileup/main.nf | 6 +++--- modules/samtools/sort/main.nf | 6 +++--- modules/samtools/stats/main.nf | 9 +++++---- modules/samtools/view/main.nf | 15 ++++++++------- tests/modules/samtools/ampliconclip/test.yml | 12 ++++++------ tests/modules/samtools/fixmate/test.yml | 4 ++-- tests/modules/samtools/index/test.yml | 6 +++--- tests/modules/samtools/merge/test.yml | 4 ++-- tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/main.nf | 6 +++--- tests/modules/samtools/stats/test.yml | 8 ++++---- .../subworkflows/nf-core/align_bowtie2/test.yml | 4 ++-- .../nf-core/bam_sort_samtools/test.yml | 16 ++++++++-------- 21 files changed, 79 insertions(+), 76 deletions(-) diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 3da1d6fe..4cf98d3f 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_AMPLICONCLIP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.13" : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -38,7 +38,6 @@ process SAMTOOLS_AMPLICONCLIP { samtools \\ ampliconclip \\ $options.args \\ - -@ $task.cpus \\ $rejects \\ $stats \\ -b $bed \\ diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 80cedeab..80708084 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FAIDX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 0b454360..fb7e3904 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FASTQ { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -23,7 +23,7 @@ process SAMTOOLS_FASTQ { output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" @@ -32,7 +32,7 @@ process SAMTOOLS_FASTQ { """ samtools fastq \\ $options.args \\ - -@ $task.cpus \\ + --threads ${task.cpus-1} \\ $endedness \\ $bam cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index e1a766a1..af1cf829 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -33,7 +33,7 @@ process SAMTOOLS_FIXMATE { samtools \\ fixmate \\ $options.args \\ - -@ $task.cpus \\ + --threads ${task.cpus-1} \\ $bam \\ ${prefix}.bam \\ diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index f9115c6b..072a135f 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_FLAGSTAT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -27,7 +27,7 @@ process SAMTOOLS_FLAGSTAT { script: """ - samtools flagstat $bam > ${bam}.flagstat + samtools flagstat --threads ${task.cpus-1} $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index b005088a..fa0e7dc3 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_IDXSTATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 62254bc8..d66e4513 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_INDEX { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -29,7 +29,8 @@ process SAMTOOLS_INDEX { script: """ - samtools index $options.args $input + samtools index -@ ${task.cpus-1} $options.args $input + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index fefb423b..ab641bb9 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MERGE { mode: params.publish_dir_mode, saveAs: { filename -> 
saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -32,7 +32,8 @@ process SAMTOOLS_MERGE { def file_type = input_files[0].getExtension() def reference = fasta ? "--reference ${fasta}" : "" """ - samtools merge ${reference} ${prefix}.${file_type} $input_files + samtools merge --threads ${task.cpus-1} $options.args ${reference} ${prefix}.${file_type} $input_files + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 9e120526..081682ed 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_MPILEUP { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index b30f6f45..f980b472 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_SORT { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index aab43410..e0a2b50d 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_STATS { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -29,7 +29,8 @@ process SAMTOOLS_STATS { script: def reference = fasta ? "--reference ${fasta}" : "" """ - samtools stats ${reference} ${input} > ${input}.stats + samtools stats --threads ${task.cpus-1} ${reference} ${input} > ${input}.stats + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index b7a047ee..e5ff5546 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -11,11 +11,11 @@ process SAMTOOLS_VIEW { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" + container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" + container "quay.io/biocontainers/samtools:1.14--hb421002_0" } input: @@ -23,16 +23,17 @@ process SAMTOOLS_VIEW { path fasta output: - tuple val(meta), path("*.bam") , optional: true, emit: bam - tuple val(meta), path("*.cram"), optional: true, emit: cram - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam") , emit: bam , optional: true + tuple val(meta), path("*.cram"), emit: cram, optional: true + path "versions.yml" , emit: versions script: def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ - samtools view ${reference} $options.args $input > ${prefix}.${file_type} + samtools view --threads ${task.cpus-1} ${reference} $options.args $input > ${prefix}.${file_type} + cat <<-END_VERSIONS > versions.yml ${getProcessName(task.process)}: ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index 9e41ce5b..9e8e1f9f 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -7,7 +7,7 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 1c705ebe39f68f1dac164733ae99c9d2 + md5sum: 678f9ab04fbe3206f0f96e170fd833e9 - name: samtools ampliconclip no stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config @@ -16,9 +16,9 @@ - 
samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 86c7bfb5378d57b16855c5b399000b2a + md5sum: bbf65ea626539d96c8271e17d1fc988b - path: output/samtools/test.cliprejects.bam - md5sum: 8e2eea2c0005b4d4e77c0eb549599133 + md5sum: a0bee15aead020d16d0c81bd9667df46 - name: samtools ampliconclip with stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config @@ -27,8 +27,8 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: d96f5eebef0ff4635e68090e89756d4a + md5sum: f5a3611ecad34ba2dde77096e1c7dd93 - path: output/samtools/test.cliprejects.bam - md5sum: ad83a523d6ff1c58caade4ddafbaaed7 + md5sum: 90ee7ce908b4bdb89ab41e4410de9012 - path: output/samtools/test.clipstats.txt - md5sum: 6fbde83d658cd2813b79900d33800d1d + md5sum: fc23355e1743d47f2541f2cb1a7a0cda diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml index c7864c04..0b3aa2a9 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -1,8 +1,8 @@ - name: samtools fixmate test_samtools_fixmate command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config tags: - - samtools/fixmate - samtools + - samtools/fixmate files: - path: output/samtools/test.bam - md5sum: 92c8463710cdcaef2010aa02ed9e01fd + md5sum: a4092657a4b17170c7702a76cbf192a1 diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 66ab8211..279b99d8 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -1,4 +1,4 @@ -- name: samtools index bai +- name: samtools index test_samtools_index_bai command: nextflow run tests/modules/samtools/index -entry test_samtools_index_bai -c tests/config/nextflow.config tags: - samtools @@ -7,7 +7,7 @@ - path: output/samtools/test.paired_end.sorted.bam.bai 
md5sum: 704c10dd1326482448ca3073fdebc2f4 -- name: samtools index crai +- name: samtools index test_samtools_index_crai command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config tags: - samtools @@ -16,7 +16,7 @@ - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 -- name: samtools index csi +- name: samtools index test_samtools_index_csi command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config tags: - samtools diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index b39ca2ec..f04aa74b 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,15 +1,15 @@ - name: samtools merge test_samtools_merge command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config tags: - - samtools/merge - samtools + - samtools/merge files: - path: output/samtools/test_merged.bam - name: samtools merge test_samtools_merge_cram command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config tags: - - samtools/merge - samtools + - samtools/merge files: - path: output/samtools/test_merged.cram diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 12e6669f..785ec03b 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -5,4 +5,4 @@ - samtools/sort files: - path: output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 8e8b0c88..4e92b366 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -14,9 +14,9 @@ workflow test_samtools_stats { } workflow 
test_samtools_stats_cram { - input = [ [ id: 'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + input = [ [ id: 'test', single_end:true ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index a194c666..c186665a 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,17 +1,17 @@ - name: samtools stats test_samtools_stats command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config tags: - - samtools - samtools/stats + - samtools files: - path: output/samtools/test.paired_end.sorted.bam.stats - md5sum: a7f36cf11fd3bf97e0a0ae29c0627296 + md5sum: 09146eeecfcae2a84fb8615c86cd8d64 - name: samtools stats test_samtools_stats_cram command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config tags: - - samtools - samtools/stats + - samtools files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats - md5sum: bd55a1da30028403f4b66dacf7a2a20e + md5sum: 62377b29c3f6253e37308a28d13a496d diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 116ea961..07c0b1b4 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -37,7 +37,7 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: 
e16eb632f7f462514b0873c7ac8ac905 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 2d837cd72432cd856fca70d33f02ffb5 + md5sum: d9eb909c2cde69d6ae83999a72d770d7 - name: align bowtie2 paired-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config @@ -78,4 +78,4 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: 29ff2fa56d35b2a47625b8f517f1a947 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 98aa88a39d26244c89bd4e577953fb48 + md5sum: d0c7a1a4fbd2c1aed437ca419a9e344f diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index 88ea9d5a..b84735e5 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -4,15 +4,15 @@ - subworkflows/bam_sort_samtools - subworkflows/bam_stats_samtools # Modules - # - samtools - # - samtools/index - # - samtools/sort - # - samtools/stats - # - samtools/idxstats - # - samtools/flagstat + - samtools + - samtools/index + - samtools/sort + - samtools/stats + - samtools/idxstats + - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: e4c77897d6824ce4df486d1b100618af + md5sum: 8b56bb7d26ced04112f712250d915aaa - path: ./output/samtools/test.sorted.bam.bai md5sum: a70940ce9ba2e700ec2984e0a6526099 # samtools stats @@ -36,7 +36,7 @@ # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d - path: ./output/samtools/test.sorted.bam.bai md5sum: 20c91e3a0fd4661d7cb967f40d2486ba # samtools stats From 72c94dbed93cf7b5cc8f937e73a2caefd8ae8c04 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Fri, 19 Nov 2021 08:33:29 +0100 Subject: [PATCH 067/101] Add new module: Das Tool (#1004) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove 
forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * add dastool/scaffolds2bin * add dastool * remove non reproducible md5sum check for compressed files * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. 
Fellows Yates * split tests * export env variable * remove metabat2 from PR * fix linting errors * remove traling whitespace * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * PR review updates * update from PR review * update test files * add bacillus fragilis alignments * switch tests to bacillus fragilis * add string check * update test string * add pr comment answer * last fixes for PR review Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm --- modules/dastool/dastool/functions.nf | 78 +++++++++++++++ modules/dastool/dastool/main.nf | 73 ++++++++++++++ modules/dastool/dastool/meta.yml | 100 +++++++++++++++++++ modules/dastool/scaffolds2bin/functions.nf | 78 +++++++++++++++ modules/dastool/scaffolds2bin/main.nf | 46 +++++++++ modules/dastool/scaffolds2bin/meta.yml | 58 +++++++++++ tests/config/pytest_modules.yml | 8 ++ tests/modules/dastool/dastool/main.nf | 33 ++++++ tests/modules/dastool/dastool/test.yml | 29 ++++++ tests/modules/dastool/scaffolds2bin/main.nf | 25 +++++ tests/modules/dastool/scaffolds2bin/test.yml | 14 +++ 11 files changed, 542 insertions(+) create mode 100644 modules/dastool/dastool/functions.nf create mode 100644 modules/dastool/dastool/main.nf create mode 100644 modules/dastool/dastool/meta.yml create mode 100644 modules/dastool/scaffolds2bin/functions.nf create mode 100644 modules/dastool/scaffolds2bin/main.nf create mode 100644 modules/dastool/scaffolds2bin/meta.yml create mode 100644 tests/modules/dastool/dastool/main.nf create mode 100644 tests/modules/dastool/dastool/test.yml create mode 100644 
tests/modules/dastool/scaffolds2bin/main.nf create mode 100644 tests/modules/dastool/scaffolds2bin/test.yml diff --git a/modules/dastool/dastool/functions.nf b/modules/dastool/dastool/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/dastool/dastool/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof 
List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..dff32294 --- /dev/null +++ b/modules/dastool/dastool/main.nf @@ -0,0 +1,73 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DASTOOL_DASTOOL { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::das_tool=1.1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" + } else { + container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" + } + + input: + tuple val(meta), path(contigs), path(bins) + path(proteins) + path(db_directory) + val(search_engine) + + output: + tuple val(meta), path("*.log") , emit: log + tuple val(meta), path("*_summary.txt") , emit: summary + tuple val(meta), path("*_DASTool_scaffolds2bin.txt") , emit: scaffolds2bin + tuple val(meta), path("*.eval") , optional: true, emit: eval + tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins + tuple val(meta), path("*.pdf") , optional: true, emit: pdfs + tuple val(meta), path("*.proteins.faa") , optional: true, emit: fasta_proteins + tuple val(meta), path("*.archaea.scg") , optional: true, emit: fasta_archaea_scg + tuple val(meta), path("*.bacteria.scg") , optional: true, emit: fasta_bacteria_scg + tuple val(meta), path("*.seqlength") , optional: true, emit: seqlength + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def bin_list = bins instanceof List ? bins.join(",") : "$bins" + def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" + def db_dir = db_directory ? "--db_directory $db_directory" : "" + def clean_contigs = contigs.toString() - ".gz" + def decompress_contigs = contigs.toString() == clean_contigs ? "" : "gunzip -q -f $contigs" + def decompress_proteins = proteins ? "gunzip -f $proteins" : "" + def clean_proteins = proteins ? proteins.toString() - ".gz" : "" + def proteins_pred = proteins ? "--proteins $clean_proteins" : "" + + if (! search_engine) { + log.info('[DAS_Tool] Default search engine (USEARCH) is proprietary software and not available in bioconda. 
Using DIAMOND as alternative.') + } + + """ + $decompress_proteins + $decompress_contigs + + DAS_Tool \\ + $options.args \\ + $proteins_pred \\ + $db_dir \\ + $engine \\ + -t $task.cpus \\ + --bins $bin_list \\ + -c $clean_contigs \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/dastool/meta.yml b/modules/dastool/dastool/meta.yml new file mode 100644 index 00000000..12d31e9f --- /dev/null +++ b/modules/dastool/dastool/meta.yml @@ -0,0 +1,100 @@ +name: dastool_dastool +description: DAS Tool binning step. +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - contigs: + type: file + description: fasta file + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - bins: + type: file + description: "Scaffolds2bin tabular file generated with dastool/scaffolds2bin" + pattern: "*.scaffolds2bin.tsv" + - proteins: + type: file + description: Predicted proteins in prodigal fasta format (>scaffoldID_geneNo) + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - db_directory: + type: file + description: (optional) Directory of single copy gene database. + - search_engine: + type: val + description: Engine used for single copy gene identification. 
USEARCH is not supported due to it being proprietary [blast/diamond] + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - log: + type: file + description: Log file of the run + pattern: "*.log" + - summary: + type: file + description: Summary of output bins including quality and completeness estimates + pattern: "*summary.txt" + - scaffolds2bin: + type: file + description: Scaffolds to bin file of output bins + pattern: "*.scaffolds2bin.txt" + - eval: + type: file + description: Quality and completeness estimates of input bin sets + pattern: "*.eval" + - pdfs: + type: file + description: Plots showing the amount of high quality bins and score distribution of bins per method + pattern: "*.pdf" + - fasta_proteins: + type: file + description: Output from prodigal if not already supplied + pattern: "*.proteins.faa" + - fasta_archaea_scg: + type: file + description: Results of archaeal single-copy-gene prediction + pattern: "*.archaea.scg" + - fasta_bacteria_scg: + type: file + description: Results of bacterial single-copy-gene prediction + pattern: "*.bacteria.scg" + - seqlength: + type: file + description: Summary of contig lengths + pattern: "*.seqlength" + +authors: + - "@maxibor" + - "@jfy133" diff --git a/modules/dastool/scaffolds2bin/functions.nf b/modules/dastool/scaffolds2bin/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/dastool/scaffolds2bin/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + 
return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..b51a6e6e --- /dev/null +++ b/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,46 @@ +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process DASTOOL_SCAFFOLDS2BIN { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" + } else { + container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + val(extension) + + output: + tuple val(meta), path("*.tsv"), emit: scaffolds2bin + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def file_extension = extension ? extension : "fasta" + + """ + gunzip -f *.${file_extension}.gz + + Fasta_to_Scaffolds2Bin.sh \\ + $options.args \\ + -i . 
\\ + -e $file_extension \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/scaffolds2bin/meta.yml b/modules/dastool/scaffolds2bin/meta.yml new file mode 100644 index 00000000..f41a3cf2 --- /dev/null +++ b/modules/dastool/scaffolds2bin/meta.yml @@ -0,0 +1,58 @@ +name: dastool_scaffolds2bin +description: Helper script to convert a set of bins in fasta format to tabular scaffolds2bin format +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta of list of fasta files recommended to be gathered via with .collect() of bins + pattern: "*.{fa,fas,fasta}" + - binner: + type: val + description: Name of the binning software (optional) + - extension: + type: val + description: Fasta file extension (fa | fas | fasta | ...) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - scaffolds2bin: + type: file + description: tabular scaffolds2bin file for DAS tool input + pattern: "*.scaffolds2bin.tsv" + +authors: + - "@maxibor" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index daa48bc2..55223f55 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -338,6 +338,14 @@ damageprofiler: - modules/damageprofiler/** - tests/modules/damageprofiler/** +dastool/dastool: + - modules/dastool/dastool/** + - tests/modules/dastool/dastool/** + +dastool/scaffolds2bin: + - modules/dastool/scaffolds2bin/** + - tests/modules/dastool/scaffolds2bin/** + dedup: - modules/dedup/** - tests/modules/dedup/** diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..31c32ef4 --- /dev/null +++ b/tests/modules/dastool/dastool/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) +include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' addParams( options: [args: '--score_threshold 0 --debug'] ) + +workflow test_dastool_dastool { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: 
true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") + + Channel.of([ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)]) + .join(DASTOOL_SCAFFOLDS2BIN.out.scaffolds2bin) + .set {input_dastool} + + + DASTOOL_DASTOOL ( input_dastool, [], [], [] ) +} diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml new file mode 100644 index 00000000..eff02f96 --- /dev/null +++ b/tests/modules/dastool/dastool/test.yml @@ -0,0 +1,29 @@ +- name: dastool dastool test_dastool_dastool + command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config + tags: + - dastool + - dastool/dastool + files: + - path: output/dastool/test.seqlength + md5sum: b815a5811008c36808a59b1d0dcfab24 + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool.log + contains: + - 'DAS Tool run on' + - path: output/dastool/test_DASTool_scaffolds2bin.txt + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool_summary.txt + md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/dastool/test_proteins.faa.archaea.scg + md5sum: e79d82eecee25821d1658ea4f082601d + - path: output/dastool/test_proteins.faa.bacteria.scg + md5sum: 8132cfb17cf398d41c036ead55c96ffe + - path: output/dastool/test_test.tsv.eval + md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 
619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 diff --git a/tests/modules/dastool/scaffolds2bin/main.nf b/tests/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..63ffe82a --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) + +workflow test_dastool_scaffolds2bin { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") +} \ No newline at end of file diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml new file mode 100644 index 00000000..c6e25bff --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -0,0 +1,14 @@ +- name: dastool scaffolds2bin test_dastool_scaffolds2bin + command: nextflow run tests/modules/dastool/scaffolds2bin -entry 
test_dastool_scaffolds2bin -c tests/config/nextflow.config + tags: + - dastool + - dastool/scaffolds2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 From 5ebe62612cc05b1b39359e4a2a2eda79c65fdd73 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sun, 21 Nov 2021 05:14:02 -0700 Subject: [PATCH 068/101] add ectyper module (#948) * add ectyper module * fix-lint * try zcat * Update main.nf * fix lint * Update main.nf * Apply suggestions from code review Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> * Update main.nf * pass lint * Update main.nf * fix lint Co-authored-by: Francesco L <53608000+lescai@users.noreply.github.com> Co-authored-by: Harshil Patel --- modules/ectyper/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/ectyper/main.nf | 51 +++++++++++++++++++++ modules/ectyper/meta.yml | 51 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ectyper/main.nf | 13 ++++++ tests/modules/ectyper/test.yml | 11 +++++ 6 files changed, 208 insertions(+) create mode 100644 modules/ectyper/functions.nf create mode 100644 modules/ectyper/main.nf create mode 100644 modules/ectyper/meta.yml create mode 100644 tests/modules/ectyper/main.nf create mode 100644 tests/modules/ectyper/test.yml diff --git a/modules/ectyper/functions.nf b/modules/ectyper/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/ectyper/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// 
+// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf new file mode 100644 index 00000000..b5d8202d --- /dev/null +++ b/modules/ectyper/main.nf @@ -0,0 +1,51 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process ECTYPER { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::ectyper=1.0.0" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1" + } else { + container "quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1" + } + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.log"), emit: log + tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.txt"), emit: txt + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def is_compressed = fasta.getName().endsWith(".gz") ? 
true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + ectyper \\ + $options.args \\ + --cores $task.cpus \\ + --output ./ \\ + --input $fasta_name + mv output.tsv ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/ectyper/meta.yml b/modules/ectyper/meta.yml new file mode 100644 index 00000000..a6beca29 --- /dev/null +++ b/modules/ectyper/meta.yml @@ -0,0 +1,51 @@ +name: ectyper +description: In silico prediction of E. coli serotype +keywords: + - escherichia coli + - fasta + - serotype +tools: + - ectyper: + description: ECtyper is a python program for serotyping E. coli genomes + homepage: https://github.com/phac-nml/ecoli_serotyping + documentation: https://github.com/phac-nml/ecoli_serotyping + tool_dev_url: https://github.com/phac-nml/ecoli_serotyping + doi: "" + licence: ['Apache 2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA formatted assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: ectyper log output + pattern: "*.log" + - tsv: + type: file + description: ectyper serotyping results in TSV format + pattern: "*.tsv" + - txt: + type: file + description: Allele report generated from BLAST results + pattern: "*.tst" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 55223f55..5a879cdc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -406,6 +406,10 @@ dshbio/splitgff3: - modules/dshbio/splitgff3/** - tests/modules/dshbio/splitgff3/** +ectyper: + - modules/ectyper/** + - tests/modules/ectyper/** + emmtyper: - modules/emmtyper/** - tests/modules/emmtyper/** diff --git a/tests/modules/ectyper/main.nf b/tests/modules/ectyper/main.nf new file mode 100644 index 00000000..123df68d --- /dev/null +++ b/tests/modules/ectyper/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ECTYPER } from '../../../modules/ectyper/main.nf' addParams( options: [:] ) + +workflow test_ectyper { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + ECTYPER ( input ) +} diff --git a/tests/modules/ectyper/test.yml b/tests/modules/ectyper/test.yml new file mode 100644 index 00000000..c6f4c668 --- /dev/null +++ b/tests/modules/ectyper/test.yml @@ -0,0 +1,11 @@ +- name: ectyper test_ectyper + command: nextflow run tests/modules/ectyper -entry test_ectyper -c tests/config/nextflow.config + tags: + - ectyper + files: + - path: output/ectyper/blast_output_alleles.txt + md5sum: 27f3f5e84f7da451b2948d61589cdb06 + - path: output/ectyper/ectyper.log + contains: ['Serotype', 'RefSeq', 'O-type', 'finished'] + - path: output/ectyper/test.tsv + md5sum: ba923d7c7ee7d1047466aafc9a9df208 From 
29c669766d472ff67337d6fb8a149735cabaac53 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sun, 21 Nov 2021 05:17:25 -0700 Subject: [PATCH 069/101] add bakta module (#1085) * add bakta module * Update main.nf * Update main.nf Co-authored-by: Harshil Patel --- modules/bakta/functions.nf | 78 ++++++++++++++++ modules/bakta/main.nf | 77 ++++++++++++++++ modules/bakta/meta.yml | 85 ++++++++++++++++++ ...t_versions_yml.cpython-39-pytest-6.2.5.pyc | Bin 3558 -> 0 bytes tests/config/pytest_modules.yml | 4 + tests/modules/bakta/main.nf | 13 +++ tests/modules/bakta/test.yml | 25 ++++++ 7 files changed, 282 insertions(+) create mode 100644 modules/bakta/functions.nf create mode 100644 modules/bakta/main.nf create mode 100644 modules/bakta/meta.yml delete mode 100644 tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc create mode 100644 tests/modules/bakta/main.nf create mode 100644 tests/modules/bakta/test.yml diff --git a/modules/bakta/functions.nf b/modules/bakta/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/bakta/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return 
options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf new file mode 100644 index 00000000..2939f575 --- /dev/null +++ b/modules/bakta/main.nf @@ -0,0 +1,77 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process BAKTA { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::bakta=1.2.2" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0" + } else { + container "quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0" + } + + input: + tuple val(meta), path(fasta) + path db + path proteins + path prodigal_tf + + output: + tuple val(meta), path("${prefix}.embl") , emit: embl + tuple val(meta), path("${prefix}.faa") , emit: faa + tuple val(meta), path("${prefix}.ffn") , emit: ffn + tuple val(meta), path("${prefix}.fna") , emit: fna + tuple val(meta), path("${prefix}.gbff") , emit: gbff + tuple val(meta), path("${prefix}.gff3") , emit: gff + tuple val(meta), path("${prefix}.hypotheticals.tsv"), emit: hypotheticals_tsv + tuple val(meta), path("${prefix}.hypotheticals.faa"), emit: hypotheticals_faa + tuple val(meta), path("${prefix}.tsv") , emit: tsv + path "versions.yml" , emit: versions + + script: + prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" + def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" + """ + bakta \\ + $options.args \\ + --threads $task.cpus \\ + --prefix ${prefix} \\ + --db $db \\ + $proteins_opt \\ + $prodigal_tf \\ + $fasta + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ + + stub: + prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + """ + touch ${prefix}.embl + touch ${prefix}.faa + touch ${prefix}.ffn + touch ${prefix}.fna + touch ${prefix}.gbff + touch ${prefix}.gff3 + touch ${prefix}.hypotheticals.tsv + touch ${prefix}.hypotheticals.faa + touch ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ +} diff --git a/modules/bakta/meta.yml b/modules/bakta/meta.yml new file mode 100644 index 00000000..29e6edbe --- /dev/null +++ b/modules/bakta/meta.yml @@ -0,0 +1,85 @@ +name: bakta +description: Rapid annotation of bacterial genomes & plasmids. +keywords: + - annotation + - fasta + - prokaryote +tools: + - bakta: + description: Rapid & standardized annotation of bacterial genomes & plasmids. + homepage: https://github.com/oschwengers/bakta + documentation: https://github.com/oschwengers/bakta + tool_dev_url: https://github.com/oschwengers/bakta + doi: "10.1099/mgen.0.000685" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: | + FASTA file to be annotated. Has to contain at least a non-empty string dummy value. + - db: + type: file + description: | + Path to the Bakta database + - proteins: + type: file + description: FASTA file of trusted proteins to first annotate from (optional) + - prodigal_tf: + type: file + description: Training file to use for Prodigal (optional) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: annotations as simple human readble tab separated values + pattern: "*.tsv" + - gff: + type: file + description: annotations & sequences in GFF3 format + pattern: "*.gff3" + - gbff: + type: file + description: annotations & sequences in (multi) GenBank format + pattern: "*.gbff" + - embl: + type: file + description: annotations & sequences in (multi) EMBL format + pattern: "*.embl" + - fna: + type: file + description: replicon/contig DNA sequences as FASTA + pattern: "*.fna" + - faa: + type: file + description: CDS/sORF amino acid sequences as FASTA + pattern: "*.faa" + - ffn: + type: file + description: feature nucleotide sequences as FASTA + pattern: "*.ffn" + - hypotheticals_tsv: + type: file + description: further information on hypothetical protein CDS as simple human readble tab separated values + pattern: "*.hypotheticals.tsv" + - hypotheticals_faa: + type: file + description: hypothetical protein CDS amino acid sequences as FASTA + pattern: "*.hypotheticals.faa" + +authors: + - "@rpetit3" diff --git a/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc b/tests/__pycache__/test_versions_yml.cpython-39-pytest-6.2.5.pyc deleted file mode 100644 index 33acb8369a1bc62b5e66e1ed80e2247dd0e2759f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3558 zcmaJ@TaVku73T0FQCdkWt=50fJ2%8xAU@;c_*j71tCg z&y04xD)|!Up)V=?+}g;h4+RSJp}(O&gXmj<0(mOXry?kto--7;v;&k9hjTgS%y%wx z=6LmbRfFfZUyVJ_*0jIjp!gUtco$v~KtnZ30msOtx zcHdT1m3~EitNki`jbLe9>(_wBq}G>Ozs~I4#_lq!Kh=Jt(Go3xsXZnACMEZ^j{Obn zsBLygYW~cVBlwmmqb!sIoX;N%I{4zG*n~!Ek~}55`k^t`5-rvaO-kk@Au+k9{qjxbpJ20feVU+IYv8?e?$%-E#Hn)VR1dVK5g8EOIwYxea|m~^}#a6)g)gcA+b zU_auIhC#G1oc)nM7&#+vk2#(rnULLsap3r&BS%b)g+qPL1~TFY-D=f==RxGq$Pv*c z2j-_-_K-ar?v6pkA zwLXeG%6Mub?#)hx>tLPKc3l+ex@ieSf{?V#nTUctmYO?3w4Is<-Z)6j0GQJXXC8GW 
zdm_{7d!AsK#EzAkBgR?(*yI5Bb8}lL$s#4g{*sypm_vUjwe$5qYPCkha|&Rsf(} z1YxX+8Z?^KAXQ@PtHdU}4dVg9*~N1VzDw}F53l&hz{rUq`Y>j8$)S!>L-jRH#SxjE zi6MGp@;iv!7shEV(IJLRh%n<*Equ#_SUUKeK->^bmSmY)u@01_7sgYF#V^P&v`}By zXc;3cHugwj#g<5=eORG(Yz+ym;QUaZn=n_6P1w0Amkw)lZBG-whq-!e%nARKT7_Py z(<)tht{>Nu63jJd4LIs2G}?$uK7rXXps){@6Z%*2w-!xl_&DfsB3@E@)E0VxerQLfhXuM+lZtH1)!05f zHV2=9CV(ieqeijyOc(DdX(vJ28b~Xm|0k^E=>Lrvj?kYvLa&RHNEO!sz0S`g74`xB zDb)w`^z?H>SwFLLDaQx<*AI`+HOMFb`b@Vuec(Dr&mU14LPx528K?|iQ}Un>RGHS`SvA)N*>5~AgT|{1kF4ezgEyM+w%}b! z+RyaIn^c3m`Q~OyT#n~$R*t4}GL^kqy_X+fOp&u~Oohc~tGK-7gpqWH(G-&3BAI4# zirKWtjs1-$6G*fS)1@5UcD82!*vc|)k?_ID7tX*7vz$Gi2GXAd3{tTtAhX<7Nwr9S z(Rr}QYCg>S(f z0T~~yAf(5ovk%!ZyHuhmyN$jtv|roUyyrgL_~ibFA8+38kZ()K?}r^SJN|WNX3Dpf5I(`sb z2IGu9iT?QM#wQ#1K7N1Jtlss6V4w|de*?CMoo;-CaROlVJ9hx+`T|+?SBs8wM^UeL z#08uI9Xi#-xs%P_L>U1KQ#%LMF;VC`KLJAdNoa6*bJka0j3$=@2G=d3%o)^$J)R1w zE>;ipV^6$+gey4;nFDtuKe(+7KjG1U39umg%ENnoAe~3-;1#uQWoq4;y$xGJcqq{j z44!lX2r{X}E;e`A*hdR>XWf@f1D^T+`ll@s4dp)E;YXKZ$KbD_&PR&RN}qp5-|k+V zwI62prV~!b+l<4p;?C@J2CNWV-eSG$%=k-au7K?`IM`eG<7z$!o&UM03mzYgs)=&d z^^UltdjBXLU1@A7C!o;5_1K`04o5_5YN<)4!|Mzp$$0FGiF6 z8dmuR&~{p>ojaWOU?N>}vuNpNn81Bq&h|MtCaoMzVR5bI_$A~i Date: Sun, 21 Nov 2021 13:41:03 +0100 Subject: [PATCH 070/101] update: `gunzip` to include meta input tuple (#1082) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Specify more guidelines on input channels * Linting * Updates based on code review * Update README.md * Fix broken sentence * feat: add megahit module, currently decompressed output * Update main.nf * Update tests/modules/megahit/test.yml Co-authored-by: Maxime Borry * Apply suggestions from code review Co-authored-by: Harshil Patel * feat: compress all outputs, remove md5sums due to gz stochasicity * fix: wrong conda channel for pigz * fix: broken singleend tests and update meta.yml * Missed one * Apply suggestions from code review Co-authored-by: Harshil Patel * fix: pigz formatting * Apply suggestions from code 
review Co-authored-by: Harshil Patel * Apply suggestions from code review * Add bamUtil trimBam * Update modules/bamutil/trimbam/main.nf Co-authored-by: Harshil Patel * Update modules/bamutil/trimbam/main.nf * Changes after code-review * YAML lint * update: add (optional) meta to input tuple * YAML linting * Update main.nf Co-authored-by: Harshil Patel Co-authored-by: Maxime Borry Co-authored-by: Sébastien Guizard Co-authored-by: Robert A. Petit III --- modules/gunzip/main.nf | 10 +++++----- modules/gunzip/meta.yml | 6 ++++++ tests/modules/gunzip/main.nf | 4 +++- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index aec4569f..564fa99d 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -9,7 +9,7 @@ process GUNZIP { label 'process_low' publishDir "${params.outdir}", mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { @@ -19,14 +19,14 @@ process GUNZIP { } input: - path archive + tuple val(meta), path(archive) output: - path "$gunzip", emit: gunzip - path "versions.yml" , emit: versions + tuple val(meta), path("$gunzip"), emit: gunzip + path "versions.yml" , emit: versions script: - gunzip = archive.toString() - '.gz' + gunzip = archive.toString() - '.gz' """ gunzip \\ -f \\ diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index 3482f0d2..ea1f1546 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -10,6 +10,11 @@ tools: documentation: https://www.gnu.org/software/gzip/manual/gzip.html licence: ['GPL-3.0-or-later'] input: + - meta: + type: map + description: | + Optional groovy Map containing meta information + e.g. [ id:'test', single_end:false ] - archive: type: file description: File to be compressed/uncompressed @@ -26,3 +31,4 @@ output: authors: - "@joseespinosa" - "@drpatelh" + - "@jfy133" diff --git a/tests/modules/gunzip/main.nf b/tests/modules/gunzip/main.nf index 5a24e742..0c23a8cd 100644 --- a/tests/modules/gunzip/main.nf +++ b/tests/modules/gunzip/main.nf @@ -5,7 +5,9 @@ nextflow.enable.dsl = 2 include { GUNZIP } from '../../../modules/gunzip/main.nf' addParams( options: [:] ) workflow test_gunzip { - input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + input = [ [], + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] GUNZIP ( input ) } From c25c3fe4669096da8bd4bf3143cfc139d126035d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Sun, 21 Nov 2021 13:00:53 +0000 Subject: [PATCH 071/101] Update: `uLTRA` (#1081) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👌 IMPROVE: Update .gitignore * 📦 Add ultra module * 👌 IMPROVE: Update test input * 👌 IMPROVE: Update and 
clean code - Update to last versions.yml file - Update meta.yml - Correct typos * 👌 IMPROVE: Update output channels + Rename following subtool * 👌 IMPROVE: Remove old ultre files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 👌 IMPROVE: Remove old ultra files * 👌 IMPROVE: Update of pytest_modules.yml * 👌 IMPROVE: Update test.yml * 👌 IMPROVE: Keep md5sum as much as possible * 🐛 Fix: add unsaved modifications * 🐛 FIX: Remove one inconstant md5sum * 🐛 FIX: Grab software name using ${getSoftwareName(task.process)} * 🐛 FIX: Remove md5sums for pickle files (not constant). * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * Update modules/ultra/pipeline/main.nf Co-authored-by: Harshil Patel * 👌 IMPROVE: update output directory, update meta.yml * 👌 IMPROVE: Use modules to gunzip and sort gtf * 🐛 FIX: Set up channel correctly * 👌 IMPROVE: Remove pickles files and databases Those data might be useful in a debugging purpose. * Apply suggestions from code review * Update main.nf * 🐛 FIX: Update uLTRA to version 0.0.4.1 + remove $(pwd) * 👌 IMPROVE: Sort tags in test.yml * align order of input between main.nf and meta.yml. Add ksahlin as co-author (he did update his package to overcome the pwd-problem * Update main.nf * Update main.nf Co-authored-by: Harshil Patel Co-authored-by: Lasse Folkersen Co-authored-by: Robert A. 
Petit III --- modules/ultra/pipeline/main.nf | 12 ++++++------ modules/ultra/pipeline/meta.yml | 10 ++++++---- tests/modules/ultra/pipeline/main.nf | 17 +++++++---------- tests/modules/ultra/pipeline/test.yml | 2 +- 4 files changed, 20 insertions(+), 21 deletions(-) diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index 5a5c2c3e..b61518e6 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -11,11 +11,11 @@ process ULTRA_PIPELINE { mode: params.publish_dir_mode, saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4" : null) + conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4.1" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" } else { - container "quay.io/biocontainers/ultra_bioinformatics:0.0.4--pyh5e36f6f_1" + container "quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" } input: @@ -35,9 +35,9 @@ process ULTRA_PIPELINE { --t $task.cpus \\ --prefix $prefix \\ $options.args \\ - \$(pwd)/$genome \\ - \$(pwd)/$gtf \\ - \$(pwd)/$reads \\ + $genome \\ + $gtf \\ + $reads \\ ./ cat <<-END_VERSIONS > versions.yml diff --git a/modules/ultra/pipeline/meta.yml b/modules/ultra/pipeline/meta.yml index d0008cfc..fa8366e8 100644 --- a/modules/ultra/pipeline/meta.yml +++ b/modules/ultra/pipeline/meta.yml @@ -18,6 +18,10 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: A fasta or fastq file of reads to align + pattern: "*.{fasta,fastq}" - genome: type: file description: fasta file of reference genome @@ -26,10 +30,6 @@ input: type: file description: A annotation of use the genome pattern: "*.gtf" - - reads: - type: file - description: A fasta or fastq file of reads to align - pattern: "*.{fasta,fastq}" output: - meta: @@ -48,3 +48,5 @@ output: authors: - "@sguizard" + - "@lassefolkersen" + - "@ksahlin" diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf index 881fe9a7..1404712b 100644 --- a/tests/modules/ultra/pipeline/main.nf +++ b/tests/modules/ultra/pipeline/main.nf @@ -8,18 +8,15 @@ include { GFFREAD } from '../../../../modules/gffread/main.nf' add workflow test_ultra_pipeline { - fastq = file(params.test_data['homo_sapiens']['pacbio']['hifi'] , checkIfExists: true) + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['pacbio']['hifi'], checkIfExists: true) + ] + GUNZIP ( input ) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'] , checkIfExists: true) genome = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - - GUNZIP ( fastq ) GFFREAD ( gtf ) - GUNZIP - .out - .gunzip - .map { [ [ id:'test', single_end:false ], it ] } - .set { input } - - ULTRA_PIPELINE ( input, genome, GFFREAD.out.gtf ) + ULTRA_PIPELINE ( GUNZIP.out.gunzip, genome, GFFREAD.out.gtf ) } diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml index fa378e58..7140193b 100644 --- a/tests/modules/ultra/pipeline/test.yml +++ b/tests/modules/ultra/pipeline/test.yml @@ -1,8 +1,8 @@ - name: ultra pipeline test_ultra_pipeline command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config tags: - - ultra/pipeline - ultra + - ultra/pipeline files: - path: output/gffread/genome_sorted.gtf 
md5sum: c0b034860c679a354cd093109ed90437 From 15fd90ffe8a9596406746e9112b861ed29f32952 Mon Sep 17 00:00:00 2001 From: Abhinav Sharma Date: Sun, 21 Nov 2021 21:43:58 +0200 Subject: [PATCH 072/101] Add phyloflash module (#786) * initial stubs [ci skip] * remove comments and add main command [ci skip] * design iteration [ci skip] * add new standard functions.nf [ci skip] * update the version string [ci skip] * accomodate the db stubs and single/double ends [ci skip] * add FIXME for missing info [ci skip] * Accomodate the results folder [ci skip] * Update main.nf * Apply suggestions from code review * Update main.nf * Apply suggestions from code review * Add version file to stubs [ci skip] * Tweak the output dir pattern [ci skip] * Update modules/phyloflash/main.nf * Update modules/phyloflash/main.nf * Update modules/phyloflash/main.nf Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel Co-authored-by: FriederikeHanssen --- modules/phyloflash/functions.nf | 78 ++++++++++++++++++++++++++++ modules/phyloflash/main.nf | 85 +++++++++++++++++++++++++++++++ modules/phyloflash/meta.yml | 51 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/phyloflash/main.nf | 44 ++++++++++++++++ tests/modules/phyloflash/test.yml | 15 ++++++ 6 files changed, 277 insertions(+) create mode 100644 modules/phyloflash/functions.nf create mode 100644 modules/phyloflash/main.nf create mode 100644 modules/phyloflash/meta.yml create mode 100644 tests/modules/phyloflash/main.nf create mode 100644 tests/modules/phyloflash/test.yml diff --git a/modules/phyloflash/functions.nf b/modules/phyloflash/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/phyloflash/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? 
path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf new file mode 100644 index 00000000..894c16a2 --- /dev/null +++ b/modules/phyloflash/main.nf @@ -0,0 +1,85 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PHYLOFLASH { + tag "$meta.id" + label 'process_medium' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? "bioconda::phyloflash=3.4" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1" + } else { + container "quay.io/biocontainers/phyloflash:3.4--hdfd78af_1" + } + + input: + tuple val(meta), path(reads) + path silva_db + path univec_db + + output: + tuple val(meta), path("${meta.id}*/*"), emit: results + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + if (meta.single_end) { + """ + phyloFlash.pl \\ + $options.args \\ + -read1 ${reads[0]} \\ + -lib $prefix \\ + -interleaved \\ + -dbhome . 
\\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } else { + """ + phyloFlash.pl \\ + $options.args \\ + -read1 ${reads[0]} \\ + -read2 ${reads[1]} \\ + -lib $prefix \\ + -dbhome . \\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } + + stub: + + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + + """ + mkdir ${prefix} + touch ${prefix}/${prefix}.SSU.collection.fasta + touch ${prefix}/${prefix}.phyloFlash + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ +} diff --git a/modules/phyloflash/meta.yml b/modules/phyloflash/meta.yml new file mode 100644 index 00000000..3ed7a9fa --- /dev/null +++ b/modules/phyloflash/meta.yml @@ -0,0 +1,51 @@ +name: phyloflash +description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset. +keywords: + - metagenomics + - illumina datasets + - phylogenetic composition +tools: + - phyloflash: + description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset. + + homepage: https://hrgv.github.io/phyloFlash/ + documentation: https://hrgv.github.io/phyloFlash/usage.html + tool_dev_url: https://github.com/HRGV/phyloFlash + doi: "10.1128/mSystems.00920-20" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: Channel containing single or paired-end reads + pattern: "*.{fastq.gz,fq.gz}" + - sliva_db: + type: folder + description: Folder containing the SILVA database + pattern: "ref" + - univec_db: + type: folder + description: Folder containing UniVec database + pattern: "UniVec" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - results: + type: folder + description: Folder containing the results of phyloFlash analysis + pattern: "${prefix}*" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@abhi18av" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 077fefc1..69d6a80e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1011,6 +1011,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +phyloflash: + - modules/phyloflash/** + - tests/modules/phyloflash/** + picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** diff --git a/tests/modules/phyloflash/main.nf b/tests/modules/phyloflash/main.nf new file mode 100644 index 00000000..754d6747 --- /dev/null +++ b/tests/modules/phyloflash/main.nf @@ -0,0 +1,44 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' addParams( options: [:] ) + +process STUB_PHYLOFLASH_DATABASE { + output: + path "ref" , emit: silva_db + path "UniVec" , emit: univec_db + + stub: + """ + mkdir ref + touch UniVec + """ +} + +workflow test_phyloflash_single_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, 
STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} + +workflow test_phyloflash_paired_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} diff --git a/tests/modules/phyloflash/test.yml b/tests/modules/phyloflash/test.yml new file mode 100644 index 00000000..0cba41c5 --- /dev/null +++ b/tests/modules/phyloflash/test.yml @@ -0,0 +1,15 @@ +- name: phyloflash single-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c tests/config/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: phyloflash paired-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c tests/config/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e From 14554981528013409f13ce4fb5b638ce87cb9828 Mon Sep 17 00:00:00 2001 From: Florian De Temmerman <69114541+fbdtemme@users.noreply.github.com> Date: Sun, 21 Nov 2021 20:56:57 +0100 Subject: [PATCH 073/101] CNVkit: Make targets file optional when running in WGS mode (#1030) * Make targets.bed optional when running in wgs mode * added test for cram * Update test_data_config with new reference.cnn * Update main.nf to allow tumor-only running Still need a unit-test for this. 
Almost ready, but needs this file as input https://github.com/nf-core/test-datasets/blob/modules/data/generic/cnn/reference.cnn * re-writing previous changes, but now it wont crash the entire CI-setup * fixing overlooked merge conflict * last overlooked merge-conflict * move all files to batch subfolder * adding an optional input for a reference file (needed when running germline and tumoronly) * minor typo * update meta.yml * aligning code, renaming cnvkit to cnvkit_batch, renaming tumorbam to tumor, normalbam to normal * Update pytest_modules.yml Co-authored-by: EC2 Default User Co-authored-by: Lasse Folkersen Co-authored-by: Robert A. Petit III Co-authored-by: Harshil Patel --- modules/cnvkit/{ => batch}/functions.nf | 0 modules/cnvkit/{ => batch}/main.nf | 35 +++++--- modules/cnvkit/{ => batch}/meta.yml | 17 ++-- tests/config/pytest_modules.yml | 6 +- tests/config/test_data.config | 3 + tests/modules/cnvkit/batch/main.nf | 64 +++++++++++++++ tests/modules/cnvkit/batch/test.yml | 101 ++++++++++++++++++++++++ tests/modules/cnvkit/main.nf | 19 ----- tests/modules/cnvkit/test.yml | 27 ------- 9 files changed, 207 insertions(+), 65 deletions(-) rename modules/cnvkit/{ => batch}/functions.nf (100%) rename modules/cnvkit/{ => batch}/main.nf (59%) mode change 100755 => 100644 rename modules/cnvkit/{ => batch}/meta.yml (89%) mode change 100755 => 100644 create mode 100755 tests/modules/cnvkit/batch/main.nf create mode 100755 tests/modules/cnvkit/batch/test.yml delete mode 100755 tests/modules/cnvkit/main.nf delete mode 100755 tests/modules/cnvkit/test.yml diff --git a/modules/cnvkit/functions.nf b/modules/cnvkit/batch/functions.nf similarity index 100% rename from modules/cnvkit/functions.nf rename to modules/cnvkit/batch/functions.nf diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/batch/main.nf old mode 100755 new mode 100644 similarity index 59% rename from modules/cnvkit/main.nf rename to modules/cnvkit/batch/main.nf index 27c8bb0d..06ecaa40 --- 
a/modules/cnvkit/main.nf +++ b/modules/cnvkit/batch/main.nf @@ -4,7 +4,7 @@ include { initOptions; saveFiles; getSoftwareName; getProcessName } from './func params.options = [:] options = initOptions(params.options) -process CNVKIT { +process CNVKIT_BATCH { tag "$meta.id" label 'process_low' publishDir "${params.outdir}", @@ -19,25 +19,40 @@ process CNVKIT { } input: - tuple val(meta), path(tumourbam), path(normalbam) + tuple val(meta), path(tumor), path(normal) path fasta - path targetfile + path targets + path reference output: tuple val(meta), path("*.bed"), emit: bed - tuple val(meta), path("*.cnn"), emit: cnn - tuple val(meta), path("*.cnr"), emit: cnr - tuple val(meta), path("*.cns"), emit: cns + tuple val(meta), path("*.cnn"), emit: cnn, optional: true + tuple val(meta), path("*.cnr"), emit: cnr, optional: true + tuple val(meta), path("*.cns"), emit: cns, optional: true path "versions.yml" , emit: versions script: + normal_args = normal ? "--normal $normal" : "" + fasta_args = fasta ? "--fasta $fasta" : "" + reference_args = reference ? "--reference $reference" : "" + + def target_args = "" + if (options.args.contains("--method wgs") || options.args.contains("-m wgs")) { + target_args = targets ? 
"--targets $targets" : "" + } + else { + target_args = "--targets $targets" + } + """ cnvkit.py \\ batch \\ - $tumourbam \\ - --normal $normalbam\\ - --fasta $fasta \\ - --targets $targetfile \\ + $tumor \\ + $normal_args \\ + $fasta_args \\ + $reference_args \\ + $target_args \\ + --processes ${task.cpus} \\ $options.args cat <<-END_VERSIONS > versions.yml diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/batch/meta.yml old mode 100755 new mode 100644 similarity index 89% rename from modules/cnvkit/meta.yml rename to modules/cnvkit/batch/meta.yml index 3e760d16..0d263041 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/batch/meta.yml @@ -1,4 +1,4 @@ -name: cnvkit +name: cnvkit_batch description: Copy number variant detection from high-throughput sequencing data keywords: - bam @@ -38,14 +38,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - tumourbam: + - tumour: type: file description: | - Input tumour sample bam file - - normalbam: + Input tumour sample bam file (or cram) + - normal: type: file description: | - Input normal sample bam file + Input normal sample bam file (or cram) - fasta: type: file description: | @@ -54,6 +54,10 @@ input: type: file description: | Input target bed file + - reference: + type: file + description: | + Input reference cnn-file (only for germline and tumor-only running) output: - meta: type: map @@ -85,4 +89,5 @@ authors: - "@KevinMenden" - "@MaxUlysse" - "@drpatelh" - + - "@fbdtemme" + - "@lassefolkersen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 69d6a80e..7b47bfea 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -294,9 +294,9 @@ cmseq/polymut: - modules/cmseq/polymut/** - tests/modules/cmseq/polymut/** -cnvkit: - - modules/cnvkit/** - - tests/modules/cnvkit/** +cnvkit/batch: + - modules/cnvkit/batch/** + - tests/modules/cnvkit/batch/** cooler/digest: - modules/cooler/digest/** diff --git 
a/tests/config/test_data.config b/tests/config/test_data.config index 3351204d..c3bae012 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -263,6 +263,9 @@ params { 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } + 'cnn' { + reference = "${test_data_dir}/generic/cnn/reference.cnn" + } 'cooler'{ test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" test_pairix_pair_gz_px2 = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz.px2" diff --git a/tests/modules/cnvkit/batch/main.nf b/tests/modules/cnvkit/batch/main.nf new file mode 100755 index 00000000..5d92afaa --- /dev/null +++ b/tests/modules/cnvkit/batch/main.nf @@ -0,0 +1,64 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) +include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn --method wgs' ] ) +include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--method wgs' ] ) + + +workflow test_cnvkit_hybrid { + tumor = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test' ], // meta map + tumor, + normal + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + + CNVKIT_HYBRID ( input, fasta, targets, [] ) +} + +workflow test_cnvkit_wgs { + tumor = 
file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + normal + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + + +workflow test_cnvkit_cram { + tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + normal + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + + + +workflow test_cnvkit_tumoronly { + tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + + input = [ [ id:'test'], // meta map + tumor, + [ ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + reference = file(params.test_data['generic']['cnn']['reference'], checkIfExists: true) + + CNVKIT_TUMORONLY ( input, [], [], reference ) +} diff --git a/tests/modules/cnvkit/batch/test.yml b/tests/modules/cnvkit/batch/test.yml new file mode 100755 index 00000000..96ea670c --- /dev/null +++ b/tests/modules/cnvkit/batch/test.yml @@ -0,0 +1,101 @@ +- name: cnvkit batch test_cnvkit_hybrid + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/baits.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/baits.target.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb + - path: output/cnvkit/reference.cnn + md5sum: ac99c1ad8b917b96ae15119146c91ab9 + - path: 
output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test.paired_end.sorted.call.cns + md5sum: f2ca59b4d50b0c317adc526c1b99b622 + - path: output/cnvkit/test.paired_end.sorted.cnr + md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 + - path: output/cnvkit/test.paired_end.sorted.cns + md5sum: 060af1aa637ed51812af19bcce24fcfe + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: 3fe80b6013ffc3e9968345e810158215 + - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn + md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 + +- name: cnvkit batch test_cnvkit_wgs + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: 
output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_cram + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_cram -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_tumoronly + command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c tests/config/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/reference.antitarget-tmp.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/reference.target-tmp.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb diff --git a/tests/modules/cnvkit/main.nf 
b/tests/modules/cnvkit/main.nf deleted file mode 100755 index 6ee959ab..00000000 --- a/tests/modules/cnvkit/main.nf +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { CNVKIT } from '../../../modules/cnvkit/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) - -workflow test_cnvkit { - tumourbam = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - normalbam = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) - - input = [ [ id:'test' ], // meta map - tumourbam, - normalbam - ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) - - CNVKIT ( input, fasta, targets ) -} diff --git a/tests/modules/cnvkit/test.yml b/tests/modules/cnvkit/test.yml deleted file mode 100755 index 6e09d6f3..00000000 --- a/tests/modules/cnvkit/test.yml +++ /dev/null @@ -1,27 +0,0 @@ -- name: cnvkit - command: nextflow run ./tests/modules/cnvkit/ -entry test_cnvkit -c tests/config/nextflow.config - tags: - - cnvkit - files: - - path: output/cnvkit/baits.target.bed - md5sum: 26d25ff2d6c45b6d92169b3559c6acdb - - path: output/cnvkit/baits.antitarget.bed - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/cnvkit/reference.cnn - md5sum: ac99c1ad8b917b96ae15119146c91ab9 - - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn - md5sum: 3fe80b6013ffc3e9968345e810158215 - - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn - md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 - - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.paired_end.sorted.cnr - md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 - 
- path: output/cnvkit/test.paired_end.sorted.cns - md5sum: 060af1aa637ed51812af19bcce24fcfe - - path: output/cnvkit/test.paired_end.sorted.bintest.cns - md5sum: 6544d979475def8a9f69ba42a985668d - - path: output/cnvkit/test.paired_end.sorted.call.cns - md5sum: f2ca59b4d50b0c317adc526c1b99b622 From 45985ff6f01c6e2e229b665ba45b159bd925513d Mon Sep 17 00:00:00 2001 From: Annick Renevey <47788523+rannick@users.noreply.github.com> Date: Sun, 21 Nov 2021 21:14:09 +0100 Subject: [PATCH 074/101] Peddy nf core2 2dev0 (#1048) * Updated module to fit nf-core2.2dev0 templates * Updated module to fit nf-core2.2dev0 templates * Linked test files to nf-core/test-dataset branch:raredisease raw files * Change order of input: vcf and tbi with meta, ped without * Change order of input: vcf and tbi with meta, ped without/adapt test * Change order of input: vcf and tbi with meta, ped without/adapt test, bugfix * Indent and rename files * Update modules/peddy/main.nf Removed newline Co-authored-by: FriederikeHanssen * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/meta.yml Co-authored-by: Harshil Patel * Update modules/peddy/meta.yml Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update modules/peddy/main.nf Co-authored-by: Harshil Patel * Update pytest_modules.yml * Update main.nf * Apply suggestions from code review Co-authored-by: FriederikeHanssen Co-authored-by: Harshil Patel --- modules/peddy/functions.nf | 78 +++++++++++++++++++++++++++++++++ modules/peddy/main.nf | 47 ++++++++++++++++++++ modules/peddy/meta.yml | 64 +++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 ++ tests/modules/peddy/main.nf | 17 +++++++ tests/modules/peddy/test.yml | 17 +++++++ 7 files changed, 231 insertions(+) create mode 100644 modules/peddy/functions.nf 
create mode 100644 modules/peddy/main.nf create mode 100644 modules/peddy/meta.yml create mode 100644 tests/modules/peddy/main.nf create mode 100644 tests/modules/peddy/test.yml diff --git a/modules/peddy/functions.nf b/modules/peddy/functions.nf new file mode 100644 index 00000000..85628ee0 --- /dev/null +++ b/modules/peddy/functions.nf @@ -0,0 +1,78 @@ +// +// Utility functions used in nf-core DSL2 module files +// + +// +// Extract name of software tool from process name using $task.process +// +def getSoftwareName(task_process) { + return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() +} + +// +// Extract name of module from process name using $task.process +// +def getProcessName(task_process) { + return task_process.tokenize(':')[-1] +} + +// +// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules +// +def initOptions(Map args) { + def Map options = [:] + options.args = args.args ?: '' + options.args2 = args.args2 ?: '' + options.args3 = args.args3 ?: '' + options.publish_by_meta = args.publish_by_meta ?: [] + options.publish_dir = args.publish_dir ?: '' + options.publish_files = args.publish_files + options.suffix = args.suffix ?: '' + return options +} + +// +// Tidy up and join elements of a list to return a path string +// +def getPathFromList(path_list) { + def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries + paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes + return paths.join('/') +} + +// +// Function to save/publish module results +// +def saveFiles(Map args) { + def ioptions = initOptions(args.options) + def path_list = [ ioptions.publish_dir ?: args.publish_dir ] + + // Do not publish versions.yml unless running from pytest workflow + if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { + return null + } + if (ioptions.publish_by_meta) { + def key_list 
= ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta + for (key in key_list) { + if (args.meta && key instanceof String) { + def path = key + if (args.meta.containsKey(key)) { + path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] + } + path = path instanceof String ? path : '' + path_list.add(path) + } + } + } + if (ioptions.publish_files instanceof Map) { + for (ext in ioptions.publish_files) { + if (args.filename.endsWith(ext.key)) { + def ext_list = path_list.collect() + ext_list.add(ext.value) + return "${getPathFromList(ext_list)}/$args.filename" + } + } + } else if (ioptions.publish_files == null) { + return "${getPathFromList(path_list)}/$args.filename" + } +} diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf new file mode 100644 index 00000000..4331ed9d --- /dev/null +++ b/modules/peddy/main.nf @@ -0,0 +1,47 @@ +// Import generic module functions +include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' + +params.options = [:] +options = initOptions(params.options) + +process PEDDY { + tag "$meta.id" + label 'process_low' + publishDir "${params.outdir}", + mode: params.publish_dir_mode, + saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } + + conda (params.enable_conda ? 
"bioconda::peddy=0.4.8" : null) + if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { + container "https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0" + } else { + container "quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0" + } + + input: + tuple val(meta), path(vcf), path(vcf_tbi) + path ped + + output: + tuple val(meta), path("*.html") , emit: html + tuple val(meta), path("*.csv") , emit: csv + tuple val(meta), path("*.peddy.ped"), emit: ped + tuple val(meta), path("*.png") , emit: png + path "versions.yml" , emit: versions + + script: + def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + """ + peddy \\ + $options.args \\ + --plot \\ + -p $task.cpus \\ + $vcf \\ + $ped + + cat <<-END_VERSIONS > versions.yml + ${getProcessName(task.process)}: + ${getSoftwareName(task.process)}: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) + END_VERSIONS + """ +} diff --git a/modules/peddy/meta.yml b/modules/peddy/meta.yml new file mode 100644 index 00000000..7c3fcf45 --- /dev/null +++ b/modules/peddy/meta.yml @@ -0,0 +1,64 @@ +name: peddy +description: Manipulation, validation and exploration of pedigrees +keywords: + - pedigrees + - ped + - family + +tools: + - peddy: + description: genotype, ped correspondence check, ancestry check, sex check. directly, quickly on VCF + homepage: https://github.com/brentp/peddy + documentation: https://peddy.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/brentp/peddy + doi: "https://doi.org/10.1016/j.ajhg.2017.01.017" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf: + type: file + description: VCF file + pattern: "*.{vcf.gz}" + - ped: + type: file + description: PED/FAM file + pattern: "*.{ped,fam}" + - vcf_tbi: + type: file + description: TBI file + pattern: "*.{vcf.gz.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ped: + type: file + description: PED/FAM file + pattern: "*.peddy.{ped}" + - html: + type: file + description: HTML file + pattern: "*.{html}" + - csv: + type: file + description: CSV file + pattern: "*.{csv}" + - png: + type: file + description: PNG file + pattern: "*.{png}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@rannick" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7b47bfea..d68d64d0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1019,6 +1019,10 @@ picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** +peddy: + - modules/peddy/** + - tests/modules/peddy/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index c3bae012..0c7ce2fc 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -128,6 +128,10 @@ params { index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" + justhusky_ped = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky.ped" + justhusky_minimal_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz" + justhusky_minimal_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz.tbi" + } 
'illumina' { test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" diff --git a/tests/modules/peddy/main.nf b/tests/modules/peddy/main.nf new file mode 100644 index 00000000..d6331752 --- /dev/null +++ b/tests/modules/peddy/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PEDDY } from '../../../modules/peddy/main.nf' addParams( options: [:] ) + +workflow test_peddy { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz_tbi'], checkIfExists: true) + ] + ped = file(params.test_data['homo_sapiens']['genome']['justhusky_ped'], checkIfExists: true) + + PEDDY ( input , ped ) +} diff --git a/tests/modules/peddy/test.yml b/tests/modules/peddy/test.yml new file mode 100644 index 00000000..77bf00f6 --- /dev/null +++ b/tests/modules/peddy/test.yml @@ -0,0 +1,17 @@ +- name: peddy test_peddy + command: nextflow run tests/modules/peddy -entry test_peddy -c tests/config/nextflow.config + tags: + - peddy + files: + - path: output/peddy/justhusky_minimal.het_check.csv + md5sum: f4006d47355f2a760e40215b403926c3 + - path: output/peddy/justhusky_minimal.html + md5sum: 4f189cdbe8f03fe5c32d343c183506a5 + - path: output/peddy/justhusky_minimal.ped_check.csv + md5sum: d79a98558e280afe794d1374d2b985d4 + - path: output/peddy/justhusky_minimal.ped_check.rel-difference.csv + md5sum: 9de7e287cb30c742db2ff3622b0e63b1 + - path: output/peddy/justhusky_minimal.sex_check.csv + md5sum: 60848489bc697490da6a53b5170baf3b + - path: output/peddy/justhusky_minimal.vs.html + md5sum: 20f5f3a97fa781057c876ac79e044010 From 20d8250d9f39ddb05dfb437603aaf99b5c0b2b41 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 26 Nov 2021 07:58:40 +0000 Subject: [PATCH 075/101] Update all modules to new NF DSL2 syntax (#1099) * Add comment line for 
consistency * Remove all functions.nf * Remove include functions.nf and publishDir options * Replace options.args3 with task.ext.args3 - 3 modules * Replace options.args3 with task.ext.args3 - 17 modules * Replace {task.cpus} with task.cpus * Replace off on off off off off off off off on off on off on off off off on off off off on on off off off on on off off off off off off off on off off off off on off on on off off off on on on on off off off on off on on off on on off off on on on off on on off on off off off off on off off off on off off on off on off off off on on off on off on off off on off off off on off off off on off off off off on off off off on on on off on on off off on off on on on off on on off on on on off off off off off on on off off on off off off off off on off off on on off on on off on off off off on off off off off on on off on off off on off off on off on off off off off off off off off on on off on off off off.args with * Add def args = task.ext.args line to all modules in script section * Replace options.args with args and args_list * Initialise args2 and args3 properly * Replace container syntax * Revert container changes for cellranger/mkref * Replace getProcessName in all modules * Replace getSoftwareName in all modules * Unify modules using VERSION variable * Replae options.suffix with task.ext.suffix * Remove NF version restriction for CI * Bump NF version in README * Replace task.process.tokenize logic with task.process * Minor tweaks to unify syntax in tests main.nf * Add a separate nextflow.config for each module * Transfer remaining module options to nextflow.config * Remove addParams from tests main.nf * Remove TODO statements * Use -c to import module specific config * Bump NF version to 21.10.3 * Fix tests for artic/minion * Fix broken publishDir syntax * Standardise and fix obvious failing module tests * Remove kronatools to krona * Comment out tags in subworkflow test.yml * Fix failing module tests * Add consistent 
indentation to nextflow.config * Comment out subworklow definitions * Fix kallistobustools/ref * Fix rmarkdownnotebook * Fix jupyternotebook * Quote task.process * Add plink2/vcf to pytest_modules.yml * Remove NF_CORE_MODULES_TEST from pytest CI * Fix more tests * Move bacteroides_fragilis to prokaryotes folder * Fix cooler merge tests * Fix kallistobustools/count tests * Fix kallistobustools/ref tests * Update test_10x_1_fastq_gz file for kallistobustools/count tests * Fix bcftools/query tests * Fix delly/call tests * Fix cooler/zoomify tests * Fix csvtk/split tests * Fix gatk4/intervallisttools tests * Fix gatk4/variantfiltration * Fix pydamage/filter tests * Fix test data for unicycler * Fix gstama/collapse module * Fix leehom tests * Fix metaphlan3 tests * Fix pairtools/select tests * Update nextflow.config * Update nextflow.config * feat: update syntax * Fix arriba tests * Fix more failing tests * Update test syntax * Remove comments from tests nextflow.config * Apply suggestions from code review * Fix kallistobustools/count module * Update dumpsoftwareversions module * Update custom/dumpsoftwareversions * Add args2 to untar module * Update leftover modules * Remove last remaining addParams Co-authored-by: JoseEspinosa Co-authored-by: Gregor Sturm Co-authored-by: MaxUlysse --- .github/workflows/nf-core-linting.yml | 4 - .github/workflows/pytest-workflow.yml | 10 +- README.md | 4 +- modules/abacas/functions.nf | 78 ---------- modules/abacas/main.nf | 26 ++-- modules/adapterremoval/functions.nf | 78 ---------- modules/adapterremoval/main.nf | 37 ++--- modules/agrvate/functions.nf | 78 ---------- modules/agrvate/main.nf | 26 ++-- modules/allelecounter/functions.nf | 78 ---------- modules/allelecounter/main.nf | 26 ++-- modules/amps/functions.nf | 78 ---------- modules/amps/main.nf | 24 +--- modules/arriba/functions.nf | 78 ---------- modules/arriba/main.nf | 28 ++-- modules/artic/guppyplex/functions.nf | 78 ---------- modules/artic/guppyplex/main.nf | 26 ++-- 
modules/artic/minion/functions.nf | 78 ---------- modules/artic/minion/main.nf | 34 ++--- modules/assemblyscan/functions.nf | 78 ---------- modules/assemblyscan/main.nf | 24 +--- modules/ataqv/ataqv/functions.nf | 78 ---------- modules/ataqv/ataqv/main.nf | 25 ++-- modules/bakta/functions.nf | 78 ---------- modules/bakta/main.nf | 34 ++--- modules/bamaligncleaner/functions.nf | 78 ---------- modules/bamaligncleaner/main.nf | 26 ++-- modules/bamtools/split/functions.nf | 78 ---------- modules/bamtools/split/main.nf | 26 ++-- modules/bamutil/trimbam/functions.nf | 78 ---------- modules/bamutil/trimbam/main.nf | 26 ++-- modules/bandage/image/functions.nf | 78 ---------- modules/bandage/image/main.nf | 28 ++-- modules/bbmap/align/functions.nf | 78 ---------- modules/bbmap/align/main.nf | 26 ++-- modules/bbmap/bbduk/functions.nf | 78 ---------- modules/bbmap/bbduk/main.nf | 25 ++-- modules/bbmap/bbsplit/functions.nf | 78 ---------- modules/bbmap/bbsplit/main.nf | 32 ++--- modules/bbmap/index/functions.nf | 78 ---------- modules/bbmap/index/main.nf | 24 +--- modules/bcftools/concat/functions.nf | 78 ---------- modules/bcftools/concat/main.nf | 26 ++-- modules/bcftools/consensus/functions.nf | 78 ---------- modules/bcftools/consensus/main.nf | 26 ++-- modules/bcftools/filter/functions.nf | 78 ---------- modules/bcftools/filter/main.nf | 26 ++-- modules/bcftools/index/functions.nf | 78 ---------- modules/bcftools/index/main.nf | 26 ++-- modules/bcftools/isec/functions.nf | 78 ---------- modules/bcftools/isec/main.nf | 26 ++-- modules/bcftools/merge/functions.nf | 78 ---------- modules/bcftools/merge/main.nf | 26 ++-- modules/bcftools/mpileup/functions.nf | 78 ---------- modules/bcftools/mpileup/main.nf | 36 +++-- modules/bcftools/norm/functions.nf | 78 ---------- modules/bcftools/norm/main.nf | 26 ++-- modules/bcftools/query/functions.nf | 78 ---------- modules/bcftools/query/main.nf | 26 ++-- modules/bcftools/reheader/functions.nf | 78 ---------- 
modules/bcftools/reheader/main.nf | 26 ++-- modules/bcftools/stats/functions.nf | 78 ---------- modules/bcftools/stats/main.nf | 26 ++-- modules/bcftools/view/functions.nf | 78 ---------- modules/bcftools/view/main.nf | 28 ++-- modules/bedtools/bamtobed/functions.nf | 78 ---------- modules/bedtools/bamtobed/main.nf | 26 ++-- modules/bedtools/complement/functions.nf | 78 ---------- modules/bedtools/complement/main.nf | 26 ++-- modules/bedtools/genomecov/functions.nf | 78 ---------- modules/bedtools/genomecov/main.nf | 34 ++--- modules/bedtools/getfasta/functions.nf | 78 ---------- modules/bedtools/getfasta/main.nf | 26 ++-- modules/bedtools/intersect/functions.nf | 78 ---------- modules/bedtools/intersect/main.nf | 26 ++-- modules/bedtools/makewindows/functions.nf | 78 ---------- modules/bedtools/makewindows/main.nf | 26 ++-- modules/bedtools/maskfasta/functions.nf | 78 ---------- modules/bedtools/maskfasta/main.nf | 26 ++-- modules/bedtools/merge/functions.nf | 78 ---------- modules/bedtools/merge/main.nf | 26 ++-- modules/bedtools/slop/functions.nf | 78 ---------- modules/bedtools/slop/main.nf | 26 ++-- modules/bedtools/sort/functions.nf | 78 ---------- modules/bedtools/sort/main.nf | 26 ++-- modules/bedtools/subtract/functions.nf | 78 ---------- modules/bedtools/subtract/main.nf | 26 ++-- modules/bismark/align/functions.nf | 78 ---------- modules/bismark/align/main.nf | 26 ++-- modules/bismark/deduplicate/functions.nf | 78 ---------- modules/bismark/deduplicate/main.nf | 26 ++-- .../bismark/genomepreparation/functions.nf | 78 ---------- modules/bismark/genomepreparation/main.nf | 24 +--- .../bismark/methylationextractor/functions.nf | 78 ---------- modules/bismark/methylationextractor/main.nf | 24 +--- modules/bismark/report/functions.nf | 78 ---------- modules/bismark/report/main.nf | 24 +--- modules/bismark/summary/functions.nf | 78 ---------- modules/bismark/summary/main.nf | 22 +-- modules/blast/blastn/functions.nf | 78 ---------- modules/blast/blastn/main.nf 
| 26 ++-- modules/blast/makeblastdb/functions.nf | 78 ---------- modules/blast/makeblastdb/main.nf | 24 +--- modules/bowtie/align/functions.nf | 78 ---------- modules/bowtie/align/main.nf | 29 ++-- modules/bowtie/build/functions.nf | 78 ---------- modules/bowtie/build/main.nf | 22 +-- modules/bowtie2/align/functions.nf | 78 ---------- modules/bowtie2/align/main.nf | 37 ++--- modules/bowtie2/build/functions.nf | 78 ---------- modules/bowtie2/build/main.nf | 24 +--- modules/bwa/aln/functions.nf | 78 ---------- modules/bwa/aln/main.nf | 34 ++--- modules/bwa/index/functions.nf | 78 ---------- modules/bwa/index/main.nf | 24 +--- modules/bwa/mem/functions.nf | 78 ---------- modules/bwa/mem/main.nf | 29 ++-- modules/bwa/sampe/functions.nf | 78 ---------- modules/bwa/sampe/main.nf | 26 ++-- modules/bwa/samse/functions.nf | 78 ---------- modules/bwa/samse/main.nf | 26 ++-- modules/bwamem2/index/functions.nf | 78 ---------- modules/bwamem2/index/main.nf | 24 +--- modules/bwamem2/mem/functions.nf | 78 ---------- modules/bwamem2/mem/main.nf | 29 ++-- modules/bwameth/align/functions.nf | 78 ---------- modules/bwameth/align/main.nf | 29 ++-- modules/bwameth/index/functions.nf | 78 ---------- modules/bwameth/index/main.nf | 22 +-- modules/cat/cat/functions.nf | 78 ---------- modules/cat/cat/main.nf | 25 ++-- modules/cat/fastq/functions.nf | 78 ---------- modules/cat/fastq/main.nf | 28 ++-- modules/cellranger/mkref/functions.nf | 78 ---------- modules/cellranger/mkref/main.nf | 27 ++-- modules/checkm/lineagewf/functions.nf | 78 ---------- modules/checkm/lineagewf/main.nf | 26 ++-- modules/chromap/chromap/functions.nf | 78 ---------- modules/chromap/chromap/main.nf | 55 ++++--- modules/chromap/index/functions.nf | 78 ---------- modules/chromap/index/main.nf | 28 ++-- modules/clonalframeml/functions.nf | 78 ---------- modules/clonalframeml/main.nf | 26 ++-- modules/cmseq/polymut/functions.nf | 78 ---------- modules/cmseq/polymut/main.nf | 27 ++-- modules/cnvkit/batch/functions.nf | 
78 ---------- modules/cnvkit/batch/main.nf | 35 ++--- modules/cooler/cload/functions.nf | 78 ---------- modules/cooler/cload/main.nf | 28 ++-- modules/cooler/digest/functions.nf | 78 ---------- modules/cooler/digest/main.nf | 24 +--- modules/cooler/dump/functions.nf | 78 ---------- modules/cooler/dump/main.nf | 26 ++-- modules/cooler/dump/meta.yml | 3 + modules/cooler/merge/functions.nf | 78 ---------- modules/cooler/merge/main.nf | 26 ++-- modules/cooler/zoomify/functions.nf | 78 ---------- modules/cooler/zoomify/main.nf | 26 ++-- modules/csvtk/concat/functions.nf | 78 ---------- modules/csvtk/concat/main.nf | 24 +--- modules/csvtk/split/functions.nf | 78 ---------- modules/csvtk/split/main.nf | 26 ++-- .../custom/dumpsoftwareversions/functions.nf | 78 ---------- modules/custom/dumpsoftwareversions/main.nf | 95 +----------- modules/custom/dumpsoftwareversions/meta.yml | 1 + .../templates/dumpsoftwareversions.py | 89 ++++++++++++ modules/custom/getchromsizes/functions.nf | 78 ---------- modules/custom/getchromsizes/main.nf | 22 +-- modules/cutadapt/functions.nf | 78 ---------- modules/cutadapt/main.nf | 26 ++-- modules/damageprofiler/functions.nf | 78 ---------- modules/damageprofiler/main.nf | 37 ++--- modules/dastool/dastool/functions.nf | 78 ---------- modules/dastool/dastool/main.nf | 25 ++-- modules/dastool/scaffolds2bin/functions.nf | 78 ---------- modules/dastool/scaffolds2bin/main.nf | 25 ++-- modules/dedup/functions.nf | 78 ---------- modules/dedup/main.nf | 26 ++-- modules/deeptools/computematrix/functions.nf | 78 ---------- modules/deeptools/computematrix/main.nf | 26 ++-- .../deeptools/plotfingerprint/functions.nf | 78 ---------- modules/deeptools/plotfingerprint/main.nf | 26 ++-- modules/deeptools/plotheatmap/functions.nf | 78 ---------- modules/deeptools/plotheatmap/main.nf | 26 ++-- modules/deeptools/plotprofile/functions.nf | 78 ---------- modules/deeptools/plotprofile/main.nf | 26 ++-- modules/delly/call/functions.nf | 78 ---------- 
modules/delly/call/main.nf | 26 ++-- modules/diamond/blastp/functions.nf | 78 ---------- modules/diamond/blastp/main.nf | 26 ++-- modules/diamond/blastx/functions.nf | 78 ---------- modules/diamond/blastx/main.nf | 26 ++-- modules/diamond/makedb/functions.nf | 78 ---------- modules/diamond/makedb/main.nf | 24 +--- modules/dragonflye/functions.nf | 78 ---------- modules/dragonflye/main.nf | 24 +--- modules/dshbio/exportsegments/functions.nf | 78 ---------- modules/dshbio/exportsegments/main.nf | 26 ++-- modules/dshbio/filterbed/functions.nf | 78 ---------- modules/dshbio/filterbed/main.nf | 26 ++-- modules/dshbio/filtergff3/functions.nf | 78 ---------- modules/dshbio/filtergff3/main.nf | 26 ++-- modules/dshbio/splitbed/functions.nf | 78 ---------- modules/dshbio/splitbed/main.nf | 26 ++-- modules/dshbio/splitgff3/functions.nf | 78 ---------- modules/dshbio/splitgff3/main.nf | 26 ++-- modules/ectyper/functions.nf | 78 ---------- modules/ectyper/main.nf | 27 ++-- modules/emmtyper/functions.nf | 78 ---------- modules/emmtyper/main.nf | 26 ++-- modules/ensemblvep/functions.nf | 78 ---------- modules/ensemblvep/main.nf | 34 ++--- modules/expansionhunter/functions.nf | 78 ---------- modules/expansionhunter/main.nf | 26 ++-- modules/fargene/functions.nf | 78 ---------- modules/fargene/main.nf | 28 ++-- modules/fastani/functions.nf | 78 ---------- modules/fastani/main.nf | 28 ++-- modules/fastp/functions.nf | 78 ---------- modules/fastp/main.nf | 32 ++--- modules/fastqc/functions.nf | 78 ---------- modules/fastqc/main.nf | 32 ++--- modules/fastqscan/functions.nf | 78 ---------- modules/fastqscan/main.nf | 26 ++-- modules/fasttree/functions.nf | 78 ---------- modules/fasttree/main.nf | 24 +--- .../callmolecularconsensusreads/functions.nf | 78 ---------- .../fgbio/callmolecularconsensusreads/main.nf | 25 ++-- modules/fgbio/fastqtobam/functions.nf | 78 ---------- modules/fgbio/fastqtobam/main.nf | 34 ++--- modules/fgbio/groupreadsbyumi/functions.nf | 78 ---------- 
modules/fgbio/groupreadsbyumi/main.nf | 26 ++-- modules/fgbio/sortbam/functions.nf | 78 ---------- modules/fgbio/sortbam/main.nf | 25 ++-- modules/filtlong/functions.nf | 78 ---------- modules/filtlong/main.nf | 26 ++-- modules/flash/functions.nf | 78 ---------- modules/flash/main.nf | 26 ++-- modules/freebayes/functions.nf | 78 ---------- modules/freebayes/main.nf | 40 ++---- modules/freebayes/meta.yml | 7 +- modules/gatk4/applybqsr/functions.nf | 78 ---------- modules/gatk4/applybqsr/main.nf | 26 ++-- modules/gatk4/baserecalibrator/functions.nf | 78 ---------- modules/gatk4/baserecalibrator/main.nf | 26 ++-- modules/gatk4/bedtointervallist/functions.nf | 78 ---------- modules/gatk4/bedtointervallist/main.nf | 26 ++-- .../gatk4/calculatecontamination/functions.nf | 78 ---------- modules/gatk4/calculatecontamination/main.nf | 26 ++-- .../createsequencedictionary/functions.nf | 78 ---------- .../gatk4/createsequencedictionary/main.nf | 24 +--- .../createsomaticpanelofnormals/functions.nf | 78 ---------- .../gatk4/createsomaticpanelofnormals/main.nf | 26 ++-- .../estimatelibrarycomplexity/functions.nf | 78 ---------- .../gatk4/estimatelibrarycomplexity/main.nf | 26 ++-- modules/gatk4/fastqtosam/functions.nf | 78 ---------- modules/gatk4/fastqtosam/main.nf | 26 ++-- modules/gatk4/filtermutectcalls/functions.nf | 78 ---------- modules/gatk4/filtermutectcalls/main.nf | 26 ++-- modules/gatk4/genomicsdbimport/functions.nf | 78 ---------- modules/gatk4/genomicsdbimport/main.nf | 26 ++-- modules/gatk4/genotypegvcfs/functions.nf | 78 ---------- modules/gatk4/genotypegvcfs/main.nf | 26 ++-- modules/gatk4/getpileupsummaries/functions.nf | 78 ---------- modules/gatk4/getpileupsummaries/main.nf | 26 ++-- modules/gatk4/haplotypecaller/functions.nf | 78 ---------- modules/gatk4/haplotypecaller/main.nf | 26 ++-- modules/gatk4/indexfeaturefile/functions.nf | 78 ---------- modules/gatk4/indexfeaturefile/main.nf | 24 +--- modules/gatk4/intervallisttools/functions.nf | 78 ---------- 
modules/gatk4/intervallisttools/main.nf | 26 ++-- .../learnreadorientationmodel/functions.nf | 78 ---------- .../gatk4/learnreadorientationmodel/main.nf | 26 ++-- modules/gatk4/markduplicates/functions.nf | 78 ---------- modules/gatk4/markduplicates/main.nf | 26 ++-- modules/gatk4/mergebamalignment/functions.nf | 78 ---------- modules/gatk4/mergebamalignment/main.nf | 26 ++-- modules/gatk4/mergevcfs/functions.nf | 78 ---------- modules/gatk4/mergevcfs/main.nf | 26 ++-- modules/gatk4/mutect2/functions.nf | 78 ---------- modules/gatk4/mutect2/main.nf | 26 ++-- modules/gatk4/revertsam/functions.nf | 78 ---------- modules/gatk4/revertsam/main.nf | 26 ++-- modules/gatk4/samtofastq/functions.nf | 78 ---------- modules/gatk4/samtofastq/main.nf | 26 ++-- modules/gatk4/splitncigarreads/functions.nf | 78 ---------- modules/gatk4/splitncigarreads/main.nf | 26 ++-- modules/gatk4/variantfiltration/functions.nf | 78 ---------- modules/gatk4/variantfiltration/main.nf | 26 ++-- modules/genmap/index/functions.nf | 78 ---------- modules/genmap/index/main.nf | 22 +-- modules/genmap/mappability/functions.nf | 78 ---------- modules/genmap/mappability/main.nf | 24 +--- modules/genrich/functions.nf | 78 ---------- modules/genrich/main.nf | 28 ++-- modules/gffread/functions.nf | 78 ---------- modules/gffread/main.nf | 26 ++-- modules/glnexus/functions.nf | 78 ---------- modules/glnexus/main.nf | 26 ++-- modules/graphmap2/align/functions.nf | 78 ---------- modules/graphmap2/align/main.nf | 26 ++-- modules/graphmap2/index/functions.nf | 78 ---------- modules/graphmap2/index/main.nf | 24 +--- modules/gstama/collapse/functions.nf | 78 ---------- modules/gstama/collapse/main.nf | 27 ++-- modules/gstama/merge/functions.nf | 78 ---------- modules/gstama/merge/main.nf | 26 ++-- modules/gtdbtk/classifywf/functions.nf | 78 ---------- modules/gtdbtk/classifywf/main.nf | 29 ++-- modules/gubbins/functions.nf | 78 ---------- modules/gubbins/main.nf | 24 +--- modules/gunc/downloaddb/functions.nf | 78 
---------- modules/gunc/downloaddb/main.nf | 24 +--- modules/gunc/run/functions.nf | 78 ---------- modules/gunc/run/main.nf | 26 ++-- modules/gunzip/functions.nf | 78 ---------- modules/gunzip/main.nf | 24 +--- modules/gunzip/test.txt.gz | Bin 47 -> 0 bytes modules/hicap/functions.nf | 78 ---------- modules/hicap/main.nf | 26 ++-- modules/hifiasm/functions.nf | 78 ---------- modules/hifiasm/main.nf | 32 ++--- modules/hisat2/align/functions.nf | 78 ---------- modules/hisat2/align/main.nf | 41 ++---- modules/hisat2/build/functions.nf | 78 ---------- modules/hisat2/build/main.nf | 27 ++-- .../hisat2/extractsplicesites/functions.nf | 78 ---------- modules/hisat2/extractsplicesites/main.nf | 24 +--- modules/hmmcopy/gccounter/functions.nf | 78 ---------- modules/hmmcopy/gccounter/main.nf | 26 ++-- modules/hmmcopy/readcounter/functions.nf | 78 ---------- modules/hmmcopy/readcounter/main.nf | 34 ++--- modules/hmmer/hmmalign/functions.nf | 78 ---------- modules/hmmer/hmmalign/main.nf | 26 ++-- modules/homer/annotatepeaks/functions.nf | 78 ---------- modules/homer/annotatepeaks/main.nf | 28 ++-- modules/homer/findpeaks/functions.nf | 78 ---------- modules/homer/findpeaks/main.nf | 28 ++-- modules/homer/maketagdirectory/functions.nf | 78 ---------- modules/homer/maketagdirectory/main.nf | 30 ++-- modules/homer/makeucscfile/functions.nf | 78 ---------- modules/homer/makeucscfile/main.nf | 30 ++-- modules/idr/functions.nf | 78 ---------- modules/idr/main.nf | 24 +--- modules/imputeme/vcftoprs/functions.nf | 78 ---------- modules/imputeme/vcftoprs/main.nf | 27 ++-- modules/iqtree/functions.nf | 78 ---------- modules/iqtree/main.nf | 24 +--- modules/ismapper/functions.nf | 78 ---------- modules/ismapper/main.nf | 26 ++-- modules/isoseq3/cluster/functions.nf | 78 ---------- modules/isoseq3/cluster/main.nf | 30 ++-- modules/isoseq3/refine/functions.nf | 78 ---------- modules/isoseq3/refine/main.nf | 26 ++-- modules/ivar/consensus/functions.nf | 78 ---------- 
modules/ivar/consensus/main.nf | 29 ++-- modules/ivar/trim/functions.nf | 78 ---------- modules/ivar/trim/main.nf | 26 ++-- modules/ivar/variants/functions.nf | 78 ---------- modules/ivar/variants/main.nf | 29 ++-- modules/jupyternotebook/functions.nf | 78 ---------- modules/jupyternotebook/main.nf | 41 +++--- modules/kallisto/index/functions.nf | 78 ---------- modules/kallisto/index/main.nf | 24 +--- modules/kallistobustools/count/functions.nf | 78 ---------- modules/kallistobustools/count/main.nf | 30 ++-- modules/kallistobustools/count/meta.yml | 4 +- modules/kallistobustools/ref/functions.nf | 78 ---------- modules/kallistobustools/ref/main.nf | 34 ++--- modules/kallistobustools/ref/meta.yml | 4 +- modules/khmer/normalizebymedian/functions.nf | 78 ---------- modules/khmer/normalizebymedian/main.nf | 31 ++-- modules/kleborate/functions.nf | 78 ---------- modules/kleborate/main.nf | 26 ++-- modules/kraken2/kraken2/functions.nf | 78 ---------- modules/kraken2/kraken2/main.nf | 26 ++-- modules/krona/kronadb/main.nf | 27 ++++ .../{kronatools => krona}/kronadb/meta.yml | 4 +- modules/krona/ktimporttaxonomy/main.nf | 30 ++++ .../ktimporttaxonomy/meta.yml | 6 +- modules/kronatools/kronadb/functions.nf | 78 ---------- modules/kronatools/kronadb/main.nf | 35 ----- .../kronatools/ktimporttaxonomy/functions.nf | 78 ---------- modules/kronatools/ktimporttaxonomy/main.nf | 39 ----- modules/last/dotplot/functions.nf | 78 ---------- modules/last/dotplot/main.nf | 26 ++-- modules/last/lastal/functions.nf | 78 ---------- modules/last/lastal/main.nf | 26 ++-- modules/last/lastdb/functions.nf | 78 ---------- modules/last/lastdb/main.nf | 26 ++-- modules/last/mafconvert/functions.nf | 78 ---------- modules/last/mafconvert/main.nf | 26 ++-- modules/last/mafswap/functions.nf | 78 ---------- modules/last/mafswap/main.nf | 26 ++-- modules/last/postmask/functions.nf | 78 ---------- modules/last/postmask/main.nf | 26 ++-- modules/last/split/functions.nf | 78 ---------- 
modules/last/split/main.nf | 26 ++-- modules/last/train/functions.nf | 78 ---------- modules/last/train/main.nf | 26 ++-- modules/leehom/functions.nf | 78 ---------- modules/leehom/main.nf | 101 ++++++------- modules/lib/functions.nf | 78 ---------- modules/lima/functions.nf | 78 ---------- modules/lima/main.nf | 26 ++-- modules/lissero/functions.nf | 78 ---------- modules/lissero/main.nf | 26 ++-- modules/lofreq/call/functions.nf | 78 ---------- modules/lofreq/call/main.nf | 26 ++-- modules/lofreq/callparallel/functions.nf | 78 ---------- modules/lofreq/callparallel/main.nf | 26 ++-- modules/lofreq/filter/functions.nf | 78 ---------- modules/lofreq/filter/main.nf | 26 ++-- modules/lofreq/indelqual/functions.nf | 78 ---------- modules/lofreq/indelqual/main.nf | 25 ++-- modules/macs2/callpeak/functions.nf | 78 ---------- modules/macs2/callpeak/main.nf | 42 +++--- modules/malt/build/functions.nf | 78 ---------- modules/malt/build/main.nf | 26 ++-- modules/malt/run/functions.nf | 78 ---------- modules/malt/run/main.nf | 26 ++-- modules/maltextract/functions.nf | 78 ---------- modules/maltextract/main.nf | 24 +--- modules/manta/germline/functions.nf | 78 ---------- modules/manta/germline/main.nf | 25 +--- modules/manta/somatic/functions.nf | 78 ---------- modules/manta/somatic/main.nf | 24 +--- modules/manta/tumoronly/functions.nf | 78 ---------- modules/manta/tumoronly/main.nf | 25 +--- modules/mapdamage2/functions.nf | 78 ---------- modules/mapdamage2/main.nf | 25 ++-- modules/mash/sketch/functions.nf | 78 ---------- modules/mash/sketch/main.nf | 25 ++-- modules/mashtree/functions.nf | 78 ---------- modules/mashtree/main.nf | 26 ++-- modules/maxbin2/functions.nf | 78 ---------- modules/maxbin2/main.nf | 24 +--- modules/medaka/functions.nf | 78 ---------- modules/medaka/main.nf | 26 ++-- modules/megahit/functions.nf | 78 ---------- modules/megahit/main.nf | 37 ++--- modules/meningotype/functions.nf | 78 ---------- modules/meningotype/main.nf | 26 ++-- 
.../jgisummarizebamcontigdepths/functions.nf | 78 ---------- .../jgisummarizebamcontigdepths/main.nf | 25 ++-- modules/metabat2/metabat2/functions.nf | 78 ---------- modules/metabat2/metabat2/main.nf | 25 ++-- modules/metaphlan3/functions.nf | 78 ---------- modules/metaphlan3/main.nf | 26 ++-- modules/methyldackel/extract/functions.nf | 78 ---------- modules/methyldackel/extract/main.nf | 24 +--- modules/methyldackel/mbias/functions.nf | 78 ---------- modules/methyldackel/mbias/main.nf | 26 ++-- modules/minia/functions.nf | 78 ---------- modules/minia/main.nf | 26 ++-- modules/miniasm/functions.nf | 78 ---------- modules/miniasm/main.nf | 26 ++-- modules/minimap2/align/functions.nf | 78 ---------- modules/minimap2/align/main.nf | 26 ++-- modules/minimap2/index/functions.nf | 78 ---------- modules/minimap2/index/main.nf | 24 +--- modules/mlst/functions.nf | 78 ---------- modules/mlst/main.nf | 24 +--- modules/mosdepth/functions.nf | 78 ---------- modules/mosdepth/main.nf | 26 ++-- modules/msisensor/msi/functions.nf | 78 ---------- modules/msisensor/msi/main.nf | 26 ++-- modules/msisensor/scan/functions.nf | 78 ---------- modules/msisensor/scan/main.nf | 26 ++-- modules/mtnucratio/functions.nf | 78 ---------- modules/mtnucratio/main.nf | 25 ++-- modules/multiqc/functions.nf | 78 ---------- modules/multiqc/main.nf | 24 +--- modules/mummer/functions.nf | 78 ---------- modules/mummer/main.nf | 28 ++-- modules/muscle/functions.nf | 78 ---------- modules/muscle/main.nf | 41 ++---- modules/nanolyse/functions.nf | 78 ---------- modules/nanolyse/main.nf | 24 +--- modules/nanoplot/functions.nf | 78 ---------- modules/nanoplot/main.nf | 24 +--- modules/ncbigenomedownload/functions.nf | 78 ---------- modules/ncbigenomedownload/main.nf | 26 ++-- modules/nextclade/functions.nf | 78 ---------- modules/nextclade/main.nf | 26 ++-- modules/ngmaster/functions.nf | 78 ---------- modules/ngmaster/main.nf | 26 ++-- modules/nucmer/functions.nf | 78 ---------- modules/nucmer/main.nf | 26 
++-- modules/optitype/functions.nf | 78 ---------- modules/optitype/main.nf | 33 ++--- modules/pairix/functions.nf | 78 ---------- modules/pairix/main.nf | 24 +--- modules/pairtools/dedup/functions.nf | 78 ---------- modules/pairtools/dedup/main.nf | 26 ++-- modules/pairtools/flip/functions.nf | 78 ---------- modules/pairtools/flip/main.nf | 26 ++-- modules/pairtools/parse/functions.nf | 78 ---------- modules/pairtools/parse/main.nf | 26 ++-- modules/pairtools/restrict/functions.nf | 78 ---------- modules/pairtools/restrict/main.nf | 26 ++-- modules/pairtools/select/functions.nf | 78 ---------- modules/pairtools/select/main.nf | 26 ++-- modules/pairtools/sort/functions.nf | 78 ---------- modules/pairtools/sort/main.nf | 26 ++-- modules/pangolin/functions.nf | 78 ---------- modules/pangolin/main.nf | 26 ++-- modules/paraclu/functions.nf | 78 ---------- modules/paraclu/main.nf | 25 ++-- modules/pbbam/pbmerge/functions.nf | 78 ---------- modules/pbbam/pbmerge/main.nf | 26 ++-- modules/pbccs/functions.nf | 78 ---------- modules/pbccs/main.nf | 26 ++-- modules/peddy/functions.nf | 78 ---------- modules/peddy/main.nf | 26 ++-- modules/phantompeakqualtools/functions.nf | 78 ---------- modules/phantompeakqualtools/main.nf | 27 ++-- modules/phyloflash/functions.nf | 78 ---------- modules/phyloflash/main.nf | 41 ++---- modules/picard/collecthsmetrics/functions.nf | 78 ---------- modules/picard/collecthsmetrics/main.nf | 26 ++-- .../collectmultiplemetrics/functions.nf | 78 ---------- modules/picard/collectmultiplemetrics/main.nf | 26 ++-- modules/picard/collectwgsmetrics/functions.nf | 78 ---------- modules/picard/collectwgsmetrics/main.nf | 26 ++-- modules/picard/filtersamreads/functions.nf | 78 ---------- modules/picard/filtersamreads/main.nf | 32 ++--- modules/picard/markduplicates/functions.nf | 78 ---------- modules/picard/markduplicates/main.nf | 26 ++-- modules/picard/mergesamfiles/functions.nf | 78 ---------- modules/picard/mergesamfiles/main.nf | 30 ++-- 
modules/picard/sortsam/functions.nf | 78 ---------- modules/picard/sortsam/main.nf | 25 +--- modules/pirate/functions.nf | 78 ---------- modules/pirate/main.nf | 26 ++-- modules/plasmidid/functions.nf | 78 ---------- modules/plasmidid/main.nf | 26 ++-- modules/plink/extract/functions.nf | 78 ---------- modules/plink/extract/main.nf | 26 ++-- modules/plink/vcf/functions.nf | 78 ---------- modules/plink/vcf/main.nf | 26 ++-- modules/plink2/vcf/functions.nf | 78 ---------- modules/plink2/vcf/main.nf | 26 ++-- modules/pmdtools/filter/functions.nf | 78 ---------- modules/pmdtools/filter/main.nf | 30 ++-- modules/porechop/functions.nf | 78 ---------- modules/porechop/main.nf | 34 ++--- modules/preseq/lcextrap/functions.nf | 78 ---------- modules/preseq/lcextrap/main.nf | 26 ++-- modules/prodigal/functions.nf | 78 ---------- modules/prodigal/main.nf | 26 ++-- modules/prokka/functions.nf | 78 ---------- modules/prokka/main.nf | 25 ++-- modules/pycoqc/functions.nf | 78 ---------- modules/pycoqc/main.nf | 24 +--- modules/pydamage/analyze/functions.nf | 78 ---------- modules/pydamage/analyze/main.nf | 26 ++-- modules/pydamage/filter/functions.nf | 78 ---------- modules/pydamage/filter/main.nf | 26 ++-- modules/qcat/functions.nf | 78 ---------- modules/qcat/main.nf | 24 +--- modules/qualimap/bamqc/functions.nf | 78 ---------- modules/qualimap/bamqc/main.nf | 26 ++-- modules/qualimap/rnaseq/functions.nf | 78 ---------- modules/qualimap/rnaseq/main.nf | 26 ++-- modules/quast/functions.nf | 78 ---------- modules/quast/main.nf | 28 ++-- modules/racon/functions.nf | 78 ---------- modules/racon/main.nf | 28 ++-- modules/rapidnj/functions.nf | 78 ---------- modules/rapidnj/main.nf | 26 ++-- modules/rasusa/functions.nf | 78 ---------- modules/rasusa/main.nf | 26 ++-- modules/raxmlng/functions.nf | 78 ---------- modules/raxmlng/main.nf | 24 +--- modules/rmarkdownnotebook/functions.nf | 78 ---------- modules/rmarkdownnotebook/main.nf | 41 +++--- modules/roary/functions.nf | 78 
---------- modules/roary/main.nf | 26 ++-- modules/rsem/calculateexpression/functions.nf | 78 ---------- modules/rsem/calculateexpression/main.nf | 26 ++-- modules/rsem/preparereference/functions.nf | 78 ---------- modules/rsem/preparereference/main.nf | 39 ++--- modules/rseqc/bamstat/functions.nf | 78 ---------- modules/rseqc/bamstat/main.nf | 26 ++-- modules/rseqc/inferexperiment/functions.nf | 78 ---------- modules/rseqc/inferexperiment/main.nf | 26 ++-- modules/rseqc/innerdistance/functions.nf | 78 ---------- modules/rseqc/innerdistance/main.nf | 30 ++-- modules/rseqc/junctionannotation/functions.nf | 78 ---------- modules/rseqc/junctionannotation/main.nf | 26 ++-- modules/rseqc/junctionsaturation/functions.nf | 78 ---------- modules/rseqc/junctionsaturation/main.nf | 26 ++-- modules/rseqc/readdistribution/functions.nf | 78 ---------- modules/rseqc/readdistribution/main.nf | 24 +--- modules/rseqc/readduplication/functions.nf | 78 ---------- modules/rseqc/readduplication/main.nf | 26 ++-- modules/salmon/index/functions.nf | 78 ---------- modules/salmon/index/main.nf | 24 +--- modules/salmon/quant/functions.nf | 78 ---------- modules/salmon/quant/main.nf | 26 ++-- modules/samblaster/functions.nf | 78 ---------- modules/samblaster/main.nf | 32 ++--- modules/samtools/ampliconclip/functions.nf | 78 ---------- modules/samtools/ampliconclip/main.nf | 26 ++-- modules/samtools/bam2fq/functions.nf | 78 ---------- modules/samtools/bam2fq/main.nf | 32 ++--- modules/samtools/depth/functions.nf | 78 ---------- modules/samtools/depth/main.nf | 26 ++-- modules/samtools/faidx/functions.nf | 78 ---------- modules/samtools/faidx/main.nf | 22 +-- modules/samtools/fastq/functions.nf | 78 ---------- modules/samtools/fastq/main.nf | 26 ++-- modules/samtools/fixmate/functions.nf | 78 ---------- modules/samtools/fixmate/main.nf | 26 ++-- modules/samtools/flagstat/functions.nf | 78 ---------- modules/samtools/flagstat/main.nf | 22 +-- modules/samtools/idxstats/functions.nf | 78 
---------- modules/samtools/idxstats/main.nf | 22 +-- modules/samtools/index/functions.nf | 78 ---------- modules/samtools/index/main.nf | 24 +--- modules/samtools/merge/functions.nf | 78 ---------- modules/samtools/merge/main.nf | 26 ++-- modules/samtools/mpileup/functions.nf | 78 ---------- modules/samtools/mpileup/main.nf | 26 ++-- modules/samtools/sort/functions.nf | 78 ---------- modules/samtools/sort/main.nf | 26 ++-- modules/samtools/stats/functions.nf | 78 ---------- modules/samtools/stats/main.nf | 22 +-- modules/samtools/view/functions.nf | 78 ---------- modules/samtools/view/main.nf | 26 ++-- modules/scoary/functions.nf | 78 ---------- modules/scoary/main.nf | 26 ++-- modules/seacr/callpeak/functions.nf | 78 ---------- modules/seacr/callpeak/main.nf | 29 ++-- modules/seqkit/split2/functions.nf | 78 ---------- modules/seqkit/split2/main.nf | 32 ++--- modules/seqsero2/functions.nf | 78 ---------- modules/seqsero2/main.nf | 26 ++-- modules/seqtk/mergepe/functions.nf | 78 ---------- modules/seqtk/mergepe/main.nf | 30 ++-- modules/seqtk/sample/functions.nf | 78 ---------- modules/seqtk/sample/main.nf | 38 ++--- modules/seqtk/subseq/functions.nf | 78 ---------- modules/seqtk/subseq/main.nf | 26 ++-- modules/sequenzautils/bam2seqz/functions.nf | 78 ---------- modules/sequenzautils/bam2seqz/main.nf | 26 ++-- modules/sequenzautils/gcwiggle/functions.nf | 78 ---------- modules/sequenzautils/gcwiggle/main.nf | 26 ++-- modules/seqwish/induce/functions.nf | 78 ---------- modules/seqwish/induce/main.nf | 28 ++-- modules/shovill/functions.nf | 78 ---------- modules/shovill/main.nf | 24 +--- modules/snpdists/functions.nf | 78 ---------- modules/snpdists/main.nf | 26 ++-- modules/snpeff/functions.nf | 78 ---------- modules/snpeff/main.nf | 39 ++--- modules/snpsites/functions.nf | 78 ---------- modules/snpsites/main.nf | 24 +--- modules/sortmerna/functions.nf | 78 ---------- modules/sortmerna/main.nf | 32 ++--- modules/spades/functions.nf | 78 ---------- 
modules/spades/main.nf | 26 ++-- modules/spatyper/functions.nf | 78 ---------- modules/spatyper/main.nf | 26 ++-- modules/sratools/fasterqdump/functions.nf | 78 ---------- modules/sratools/fasterqdump/main.nf | 27 ++-- modules/sratools/prefetch/functions.nf | 78 ---------- modules/sratools/prefetch/main.nf | 24 +--- modules/staphopiasccmec/functions.nf | 78 ---------- modules/staphopiasccmec/main.nf | 26 ++-- modules/star/align/functions.nf | 78 ---------- modules/star/align/main.nf | 39 +++-- modules/star/genomegenerate/functions.nf | 78 ---------- modules/star/genomegenerate/main.nf | 34 ++--- modules/strelka/germline/functions.nf | 78 ---------- modules/strelka/germline/main.nf | 27 ++-- modules/strelka/somatic/functions.nf | 78 ---------- modules/strelka/somatic/main.nf | 26 ++-- modules/stringtie/merge/functions.nf | 78 ---------- modules/stringtie/merge/main.nf | 22 +-- modules/stringtie/stringtie/functions.nf | 78 ---------- modules/stringtie/stringtie/main.nf | 26 ++-- modules/subread/featurecounts/functions.nf | 78 ---------- modules/subread/featurecounts/main.nf | 26 ++-- modules/tabix/bgzip/functions.nf | 78 ---------- modules/tabix/bgzip/main.nf | 26 ++-- modules/tabix/bgziptabix/functions.nf | 78 ---------- modules/tabix/bgziptabix/main.nf | 29 ++-- modules/tabix/tabix/functions.nf | 78 ---------- modules/tabix/tabix/main.nf | 24 +--- modules/tbprofiler/profile/functions.nf | 78 ---------- modules/tbprofiler/profile/main.nf | 26 ++-- modules/tiddit/cov/functions.nf | 78 ---------- modules/tiddit/cov/main.nf | 29 ++-- modules/tiddit/sv/functions.nf | 78 ---------- modules/tiddit/sv/main.nf | 26 ++-- modules/trimgalore/functions.nf | 78 ---------- modules/trimgalore/main.nf | 32 ++--- modules/ucsc/bed12tobigbed/functions.nf | 78 ---------- modules/ucsc/bed12tobigbed/main.nf | 26 ++-- modules/ucsc/bedclip/functions.nf | 78 ---------- modules/ucsc/bedclip/main.nf | 26 ++-- modules/ucsc/bedgraphtobigwig/functions.nf | 78 ---------- 
modules/ucsc/bedgraphtobigwig/main.nf | 26 ++-- .../ucsc/bigwigaverageoverbed/functions.nf | 78 ---------- modules/ucsc/bigwigaverageoverbed/main.nf | 30 ++-- modules/ucsc/liftover/functions.nf | 78 ---------- modules/ucsc/liftover/main.nf | 28 ++-- modules/ucsc/wigtobigwig/functions.nf | 78 ---------- modules/ucsc/wigtobigwig/main.nf | 35 ++--- modules/ultra/pipeline/functions.nf | 78 ---------- modules/ultra/pipeline/main.nf | 26 ++-- modules/umitools/dedup/functions.nf | 78 ---------- modules/umitools/dedup/main.nf | 26 ++-- modules/umitools/extract/functions.nf | 78 ---------- modules/umitools/extract/main.nf | 32 ++--- modules/unicycler/functions.nf | 78 ---------- modules/unicycler/main.nf | 26 ++-- modules/untar/functions.nf | 78 ---------- modules/untar/main.nf | 28 ++-- modules/unzip/functions.nf | 78 ---------- modules/unzip/main.nf | 24 +--- modules/variantbam/functions.nf | 78 ---------- modules/variantbam/main.nf | 32 ++--- modules/vcftools/functions.nf | 78 ---------- modules/vcftools/main.nf | 52 +++---- modules/yara/index/functions.nf | 78 ---------- modules/yara/index/main.nf | 22 +-- modules/yara/mapper/functions.nf | 78 ---------- modules/yara/mapper/main.nf | 34 ++--- tests/config/nextflow.config | 8 +- tests/config/pytest_modules.yml | 122 ++++++++-------- tests/config/test_data.config | 77 ++++++---- tests/modules/abacas/main.nf | 2 +- tests/modules/abacas/nextflow.config | 9 ++ tests/modules/abacas/test.yml | 2 +- tests/modules/adapterremoval/main.nf | 2 +- tests/modules/adapterremoval/nextflow.config | 5 + tests/modules/adapterremoval/test.yml | 6 +- tests/modules/agrvate/main.nf | 2 +- tests/modules/agrvate/nextflow.config | 9 ++ tests/modules/agrvate/test.yml | 2 +- tests/modules/allelecounter/main.nf | 2 +- tests/modules/allelecounter/nextflow.config | 5 + tests/modules/allelecounter/test.yml | 4 +- tests/modules/amps/main.nf | 12 +- tests/modules/amps/nextflow.config | 9 ++ tests/modules/amps/test.yml | 2 +- tests/modules/arriba/main.nf 
| 16 ++- tests/modules/arriba/nextflow.config | 13 ++ tests/modules/arriba/test.yml | 70 +++++---- tests/modules/artic/guppyplex/main.nf | 2 +- tests/modules/artic/guppyplex/nextflow.config | 5 + tests/modules/artic/guppyplex/test.yml | 2 +- tests/modules/artic/minion/main.nf | 18 +-- tests/modules/artic/minion/nextflow.config | 5 + tests/modules/artic/minion/test.yml | 2 +- tests/modules/assemblyscan/main.nf | 2 +- tests/modules/assemblyscan/nextflow.config | 5 + tests/modules/assemblyscan/test.yml | 2 +- tests/modules/ataqv/ataqv/main.nf | 4 +- tests/modules/ataqv/ataqv/nextflow.config | 9 ++ tests/modules/ataqv/ataqv/test.yml | 10 +- tests/modules/bakta/main.nf | 10 +- tests/modules/bakta/nextflow.config | 5 + tests/modules/bamaligncleaner/main.nf | 2 +- tests/modules/bamaligncleaner/nextflow.config | 5 + tests/modules/bamaligncleaner/test.yml | 2 +- tests/modules/bamtools/split/main.nf | 2 +- tests/modules/bamtools/split/nextflow.config | 9 ++ tests/modules/bamtools/split/test.yml | 2 +- tests/modules/bamutil/trimbam/main.nf | 2 +- tests/modules/bamutil/trimbam/nextflow.config | 5 + tests/modules/bamutil/trimbam/test.yml | 2 +- tests/modules/bandage/image/main.nf | 2 +- tests/modules/bandage/image/nextflow.config | 5 + tests/modules/bandage/image/test.yml | 2 +- tests/modules/bbmap/align/main.nf | 6 +- tests/modules/bbmap/align/nextflow.config | 9 ++ tests/modules/bbmap/align/test.yml | 8 +- tests/modules/bbmap/bbduk/main.nf | 2 +- tests/modules/bbmap/bbduk/nextflow.config | 10 ++ tests/modules/bbmap/bbduk/test.yml | 8 +- tests/modules/bbmap/bbsplit/main.nf | 4 +- tests/modules/bbmap/bbsplit/nextflow.config | 5 + tests/modules/bbmap/bbsplit/test.yml | 2 +- tests/modules/bbmap/index/main.nf | 2 +- tests/modules/bbmap/index/nextflow.config | 5 + tests/modules/bbmap/index/test.yml | 4 +- tests/modules/bcftools/concat/main.nf | 2 +- tests/modules/bcftools/concat/nextflow.config | 9 ++ tests/modules/bcftools/concat/test.yml | 2 +- 
tests/modules/bcftools/consensus/main.nf | 2 +- .../bcftools/consensus/nextflow.config | 5 + tests/modules/bcftools/consensus/test.yml | 2 +- tests/modules/bcftools/filter/main.nf | 2 +- tests/modules/bcftools/filter/nextflow.config | 9 ++ tests/modules/bcftools/filter/test.yml | 2 +- tests/modules/bcftools/index/main.nf | 4 +- tests/modules/bcftools/index/nextflow.config | 9 ++ tests/modules/bcftools/index/test.yml | 4 +- tests/modules/bcftools/isec/main.nf | 2 +- tests/modules/bcftools/isec/nextflow.config | 9 ++ tests/modules/bcftools/isec/test.yml | 2 +- tests/modules/bcftools/merge/main.nf | 2 +- tests/modules/bcftools/merge/nextflow.config | 9 ++ tests/modules/bcftools/merge/test.yml | 2 +- tests/modules/bcftools/mpileup/main.nf | 3 +- .../modules/bcftools/mpileup/nextflow.config | 10 ++ tests/modules/bcftools/mpileup/test.yml | 2 +- tests/modules/bcftools/norm/main.nf | 2 +- tests/modules/bcftools/norm/nextflow.config | 9 ++ tests/modules/bcftools/norm/test.yml | 2 +- tests/modules/bcftools/query/main.nf | 2 +- tests/modules/bcftools/query/nextflow.config | 9 ++ tests/modules/bcftools/query/test.yml | 4 +- tests/modules/bcftools/reheader/main.nf | 2 +- .../modules/bcftools/reheader/nextflow.config | 9 ++ tests/modules/bcftools/reheader/test.yml | 6 +- tests/modules/bcftools/stats/main.nf | 2 +- tests/modules/bcftools/stats/nextflow.config | 5 + tests/modules/bcftools/stats/test.yml | 2 +- tests/modules/bcftools/view/main.nf | 2 +- tests/modules/bcftools/view/nextflow.config | 9 ++ tests/modules/bcftools/view/test.yml | 4 +- tests/modules/bedtools/bamtobed/main.nf | 2 +- .../modules/bedtools/bamtobed/nextflow.config | 5 + tests/modules/bedtools/bamtobed/test.yml | 2 +- tests/modules/bedtools/complement/main.nf | 2 +- .../bedtools/complement/nextflow.config | 9 ++ tests/modules/bedtools/complement/test.yml | 2 +- tests/modules/bedtools/genomecov/main.nf | 2 +- .../bedtools/genomecov/nextflow.config | 9 ++ tests/modules/bedtools/genomecov/test.yml | 8 +- 
tests/modules/bedtools/getfasta/main.nf | 2 +- .../modules/bedtools/getfasta/nextflow.config | 5 + tests/modules/bedtools/getfasta/test.yml | 2 +- tests/modules/bedtools/intersect/main.nf | 2 +- .../bedtools/intersect/nextflow.config | 9 ++ tests/modules/bedtools/intersect/test.yml | 4 +- tests/modules/bedtools/makewindows/main.nf | 11 +- .../bedtools/makewindows/nextflow.config | 9 ++ tests/modules/bedtools/makewindows/test.yml | 2 +- tests/modules/bedtools/maskfasta/main.nf | 2 +- .../bedtools/maskfasta/nextflow.config | 5 + tests/modules/bedtools/maskfasta/test.yml | 2 +- tests/modules/bedtools/merge/main.nf | 2 +- tests/modules/bedtools/merge/nextflow.config | 9 ++ tests/modules/bedtools/merge/test.yml | 2 +- tests/modules/bedtools/slop/main.nf | 2 +- tests/modules/bedtools/slop/nextflow.config | 10 ++ tests/modules/bedtools/slop/test.yml | 2 +- tests/modules/bedtools/sort/main.nf | 2 +- tests/modules/bedtools/sort/nextflow.config | 9 ++ tests/modules/bedtools/sort/test.yml | 2 +- tests/modules/bedtools/subtract/main.nf | 2 +- .../modules/bedtools/subtract/nextflow.config | 5 + tests/modules/bedtools/subtract/test.yml | 2 +- tests/modules/bismark/align/main.nf | 6 +- tests/modules/bismark/align/nextflow.config | 5 + tests/modules/bismark/align/test.yml | 12 +- tests/modules/bismark/deduplicate/main.nf | 2 +- .../bismark/deduplicate/nextflow.config | 5 + tests/modules/bismark/deduplicate/test.yml | 2 +- .../modules/bismark/genomepreparation/main.nf | 2 +- .../bismark/genomepreparation/nextflow.config | 5 + .../bismark/genomepreparation/test.yml | 2 +- .../bismark/methylationextractor/main.nf | 4 +- .../methylationextractor/nextflow.config | 5 + .../bismark/methylationextractor/test.yml | 2 +- tests/modules/bismark/report/main.nf | 10 +- tests/modules/bismark/report/nextflow.config | 5 + tests/modules/bismark/report/test.yml | 2 +- tests/modules/bismark/summary/main.nf | 10 +- tests/modules/bismark/summary/nextflow.config | 5 + 
tests/modules/bismark/summary/test.yml | 2 +- tests/modules/blast/blastn/main.nf | 4 +- tests/modules/blast/blastn/nextflow.config | 9 ++ tests/modules/blast/blastn/test.yml | 2 +- tests/modules/blast/makeblastdb/main.nf | 2 +- .../modules/blast/makeblastdb/nextflow.config | 9 ++ tests/modules/blast/makeblastdb/test.yml | 2 +- tests/modules/bowtie/align/main.nf | 24 ++-- tests/modules/bowtie/align/nextflow.config | 5 + tests/modules/bowtie/align/test.yml | 28 ++-- tests/modules/bowtie/build_test/main.nf | 2 +- .../modules/bowtie/build_test/nextflow.config | 5 + tests/modules/bowtie/build_test/test.yml | 2 +- tests/modules/bowtie2/align/main.nf | 25 ++-- tests/modules/bowtie2/align/nextflow.config | 5 + tests/modules/bowtie2/align/test.yml | 28 ++-- tests/modules/bowtie2/build_test/main.nf | 2 +- .../bowtie2/build_test/nextflow.config | 5 + tests/modules/bowtie2/build_test/test.yml | 2 +- tests/modules/bwa/aln/main.nf | 24 ++-- tests/modules/bwa/aln/nextflow.config | 5 + tests/modules/bwa/aln/test.yml | 24 ++-- tests/modules/bwa/index/main.nf | 2 +- tests/modules/bwa/index/nextflow.config | 5 + tests/modules/bwa/index/test.yml | 2 +- tests/modules/bwa/mem/main.nf | 24 ++-- tests/modules/bwa/mem/nextflow.config | 5 + tests/modules/bwa/mem/test.yml | 24 ++-- tests/modules/bwa/sampe/main.nf | 6 +- tests/modules/bwa/sampe/nextflow.config | 5 + tests/modules/bwa/sampe/test.yml | 2 +- tests/modules/bwa/samse/main.nf | 6 +- tests/modules/bwa/samse/nextflow.config | 5 + tests/modules/bwa/samse/test.yml | 2 +- tests/modules/bwamem2/index/main.nf | 2 +- tests/modules/bwamem2/index/nextflow.config | 5 + tests/modules/bwamem2/index/test.yml | 2 +- tests/modules/bwamem2/mem/main.nf | 24 ++-- tests/modules/bwamem2/mem/nextflow.config | 5 + tests/modules/bwamem2/mem/test.yml | 24 ++-- tests/modules/bwameth/align/main.nf | 24 ++-- tests/modules/bwameth/align/nextflow.config | 5 + tests/modules/bwameth/align/test.yml | 4 +- tests/modules/bwameth/index/main.nf | 2 +- 
tests/modules/bwameth/index/nextflow.config | 5 + tests/modules/bwameth/index/test.yml | 2 +- tests/modules/cat/cat/main.nf | 2 +- tests/modules/cat/cat/nextflow.config | 5 + tests/modules/cat/cat/test.yml | 8 +- tests/modules/cat/fastq/main.nf | 2 +- tests/modules/cat/fastq/nextflow.config | 5 + tests/modules/cat/fastq/test.yml | 4 +- tests/modules/cellranger/mkref/main.nf | 2 +- .../modules/cellranger/mkref/nextflow.config | 5 + tests/modules/cellranger/mkref/test.yml | 2 +- tests/modules/checkm/lineagewf/main.nf | 2 +- .../modules/checkm/lineagewf/nextflow.config | 5 + tests/modules/checkm/lineagewf/test.yml | 4 +- tests/modules/chromap/chromap/main.nf | 19 ++- tests/modules/chromap/chromap/nextflow.config | 9 ++ tests/modules/chromap/chromap/test.yml | 6 +- tests/modules/chromap/index/main.nf | 2 +- tests/modules/chromap/index/nextflow.config | 5 + tests/modules/chromap/index/test.yml | 2 +- tests/modules/clonalframeml/main.nf | 12 +- tests/modules/clonalframeml/nextflow.config | 5 + tests/modules/clonalframeml/test.yml | 2 +- tests/modules/cmseq/polymut/main.nf | 2 +- tests/modules/cmseq/polymut/nextflow.config | 5 + tests/modules/cmseq/polymut/test.yml | 6 +- tests/modules/cnvkit/batch/main.nf | 61 ++++---- tests/modules/cnvkit/batch/nextflow.config | 17 +++ tests/modules/cnvkit/batch/test.yml | 8 +- tests/modules/cooler/cload/main.nf | 12 +- tests/modules/cooler/cload/nextflow.config | 17 +++ tests/modules/cooler/cload/test.yml | 6 +- tests/modules/cooler/digest/main.nf | 2 +- tests/modules/cooler/digest/nextflow.config | 5 + tests/modules/cooler/digest/test.yml | 2 +- tests/modules/cooler/dump/main.nf | 2 +- tests/modules/cooler/dump/nextflow.config | 5 + tests/modules/cooler/dump/test.yml | 2 +- tests/modules/cooler/merge/main.nf | 18 ++- tests/modules/cooler/merge/nextflow.config | 5 + tests/modules/cooler/merge/test.yml | 2 +- tests/modules/cooler/zoomify/main.nf | 11 +- tests/modules/cooler/zoomify/nextflow.config | 9 ++ 
tests/modules/cooler/zoomify/test.yml | 4 +- tests/modules/csvtk/concat/main.nf | 2 +- tests/modules/csvtk/concat/nextflow.config | 5 + tests/modules/csvtk/concat/test.yml | 2 +- tests/modules/csvtk/split/main.nf | 2 +- tests/modules/csvtk/split/nextflow.config | 9 ++ tests/modules/csvtk/split/test.yml | 4 +- .../custom/dumpsoftwareversions/main.nf | 51 +++++-- .../dumpsoftwareversions/nextflow.config | 5 + .../custom/dumpsoftwareversions/test.yml | 8 +- tests/modules/custom/getchromsizes/main.nf | 2 +- .../custom/getchromsizes/nextflow.config | 5 + tests/modules/custom/getchromsizes/test.yml | 2 +- tests/modules/cutadapt/main.nf | 2 +- tests/modules/cutadapt/nextflow.config | 9 ++ tests/modules/cutadapt/test.yml | 4 +- tests/modules/damageprofiler/main.nf | 2 +- tests/modules/damageprofiler/nextflow.config | 5 + tests/modules/damageprofiler/test.yml | 6 +- tests/modules/dastool/dastool/main.nf | 8 +- tests/modules/dastool/dastool/nextflow.config | 13 ++ tests/modules/dastool/dastool/test.yml | 2 +- tests/modules/dastool/scaffolds2bin/main.nf | 6 +- .../dastool/scaffolds2bin/nextflow.config | 9 ++ tests/modules/dastool/scaffolds2bin/test.yml | 2 +- tests/modules/dedup/main.nf | 2 +- tests/modules/dedup/nextflow.config | 9 ++ tests/modules/dedup/test.yml | 2 +- tests/modules/deeptools/computematrix/main.nf | 2 +- .../deeptools/computematrix/nextflow.config | 9 ++ .../modules/deeptools/computematrix/test.yml | 2 +- .../modules/deeptools/plotfingerprint/main.nf | 2 +- .../deeptools/plotfingerprint/nextflow.config | 5 + .../deeptools/plotfingerprint/test.yml | 2 +- tests/modules/deeptools/plotheatmap/main.nf | 2 +- .../deeptools/plotheatmap/nextflow.config | 5 + tests/modules/deeptools/plotheatmap/test.yml | 2 +- tests/modules/deeptools/plotprofile/main.nf | 2 +- .../deeptools/plotprofile/nextflow.config | 5 + tests/modules/deeptools/plotprofile/test.yml | 2 +- tests/modules/delly/call/main.nf | 2 +- tests/modules/delly/call/nextflow.config | 5 + 
tests/modules/delly/call/test.yml | 3 +- tests/modules/diamond/blastp/main.nf | 4 +- tests/modules/diamond/blastp/nextflow.config | 9 ++ tests/modules/diamond/blastp/test.yml | 2 +- tests/modules/diamond/blastx/main.nf | 4 +- tests/modules/diamond/blastx/nextflow.config | 9 ++ tests/modules/diamond/blastx/test.yml | 2 +- tests/modules/diamond/makedb/main.nf | 2 +- tests/modules/diamond/makedb/nextflow.config | 5 + tests/modules/diamond/makedb/test.yml | 2 +- tests/modules/dragonflye/main.nf | 4 +- tests/modules/dragonflye/nextflow.config | 13 ++ tests/modules/dragonflye/test.yml | 4 +- tests/modules/dshbio/exportsegments/main.nf | 2 +- .../dshbio/exportsegments/nextflow.config | 5 + tests/modules/dshbio/exportsegments/test.yml | 2 +- tests/modules/dshbio/filterbed/main.nf | 2 +- .../modules/dshbio/filterbed/nextflow.config | 9 ++ tests/modules/dshbio/filterbed/test.yml | 2 +- tests/modules/dshbio/filtergff3/main.nf | 2 +- .../modules/dshbio/filtergff3/nextflow.config | 10 ++ tests/modules/dshbio/filtergff3/test.yml | 2 +- tests/modules/dshbio/splitbed/main.nf | 2 +- tests/modules/dshbio/splitbed/nextflow.config | 10 ++ tests/modules/dshbio/splitbed/test.yml | 2 +- tests/modules/dshbio/splitgff3/main.nf | 2 +- .../modules/dshbio/splitgff3/nextflow.config | 10 ++ tests/modules/dshbio/splitgff3/test.yml | 2 +- tests/modules/ectyper/main.nf | 10 +- tests/modules/ectyper/nextflow.config | 5 + tests/modules/ectyper/test.yml | 2 +- tests/modules/emmtyper/main.nf | 2 +- tests/modules/emmtyper/nextflow.config | 5 + tests/modules/emmtyper/test.yml | 2 +- tests/modules/ensemblvep/main.nf | 10 +- tests/modules/ensemblvep/nextflow.config | 10 ++ tests/modules/ensemblvep/test.yml | 2 +- tests/modules/expansionhunter/main.nf | 2 +- tests/modules/expansionhunter/nextflow.config | 5 + tests/modules/expansionhunter/test.yml | 2 +- tests/modules/fargene/main.nf | 2 +- tests/modules/fargene/nextflow.config | 5 + tests/modules/fargene/test.yml | 2 +- tests/modules/fastani/main.nf | 2 
+- tests/modules/fastani/nextflow.config | 5 + tests/modules/fastani/test.yml | 2 +- tests/modules/fastp/main.nf | 2 +- tests/modules/fastp/nextflow.config | 5 + tests/modules/fastp/test.yml | 10 +- tests/modules/fastqc/main.nf | 2 +- tests/modules/fastqc/nextflow.config | 5 + tests/modules/fastqc/test.yml | 4 +- tests/modules/fastqscan/main.nf | 2 +- tests/modules/fastqscan/nextflow.config | 9 ++ tests/modules/fastqscan/test.yml | 2 +- tests/modules/fasttree/main.nf | 2 +- tests/modules/fasttree/nextflow.config | 5 + tests/modules/fasttree/test.yml | 2 +- .../fgbio/callmolecularconsensusreads/main.nf | 4 +- .../nextflow.config | 15 ++ .../callmolecularconsensusreads/test.yml | 2 +- tests/modules/fgbio/fastqtobam/main.nf | 17 ++- .../modules/fgbio/fastqtobam/nextflow.config | 5 + tests/modules/fgbio/fastqtobam/test.yml | 4 +- tests/modules/fgbio/groupreadsbyumi/main.nf | 9 +- .../fgbio/groupreadsbyumi/nextflow.config | 5 + tests/modules/fgbio/groupreadsbyumi/test.yml | 2 +- tests/modules/fgbio/sortbam/main.nf | 2 +- tests/modules/fgbio/sortbam/nextflow.config | 5 + tests/modules/fgbio/sortbam/test.yml | 2 +- tests/modules/filtlong/main.nf | 2 +- tests/modules/filtlong/nextflow.config | 5 + tests/modules/filtlong/test.yml | 6 +- tests/modules/flash/main.nf | 2 +- tests/modules/flash/nextflow.config | 9 ++ tests/modules/flash/test.yml | 2 +- tests/modules/freebayes/main.nf | 2 +- tests/modules/freebayes/nextflow.config | 5 + tests/modules/freebayes/test.yml | 10 +- tests/modules/gatk4/applybqsr/main.nf | 2 +- tests/modules/gatk4/applybqsr/nextflow.config | 5 + tests/modules/gatk4/applybqsr/test.yml | 6 +- tests/modules/gatk4/baserecalibrator/main.nf | 2 +- .../gatk4/baserecalibrator/nextflow.config | 5 + tests/modules/gatk4/baserecalibrator/test.yml | 8 +- tests/modules/gatk4/bedtointervallist/main.nf | 2 +- .../gatk4/bedtointervallist/nextflow.config | 5 + .../modules/gatk4/bedtointervallist/test.yml | 2 +- .../gatk4/calculatecontamination/main.nf | 2 +- 
.../calculatecontamination/nextflow.config | 5 + .../gatk4/calculatecontamination/test.yml | 6 +- .../gatk4/createsequencedictionary/main.nf | 2 +- .../createsequencedictionary/nextflow.config | 5 + .../gatk4/createsequencedictionary/test.yml | 2 +- .../gatk4/createsomaticpanelofnormals/main.nf | 4 +- .../nextflow.config | 9 ++ .../createsomaticpanelofnormals/test.yml | 2 +- .../gatk4/estimatelibrarycomplexity/main.nf | 2 +- .../estimatelibrarycomplexity/nextflow.config | 5 + .../gatk4/estimatelibrarycomplexity/test.yml | 2 +- tests/modules/gatk4/fastqtosam/main.nf | 2 +- .../modules/gatk4/fastqtosam/nextflow.config | 5 + tests/modules/gatk4/fastqtosam/test.yml | 4 +- tests/modules/gatk4/filtermutectcalls/main.nf | 2 +- .../gatk4/filtermutectcalls/nextflow.config | 9 ++ .../modules/gatk4/filtermutectcalls/test.yml | 6 +- tests/modules/gatk4/genomicsdbimport/main.nf | 4 +- .../gatk4/genomicsdbimport/nextflow.config | 5 + tests/modules/gatk4/genomicsdbimport/test.yml | 6 +- tests/modules/gatk4/genotypegvcfs/main.nf | 4 +- .../gatk4/genotypegvcfs/nextflow.config | 9 ++ tests/modules/gatk4/genotypegvcfs/test.yml | 18 +-- .../modules/gatk4/getpileupsummaries/main.nf | 2 +- .../gatk4/getpileupsummaries/nextflow.config | 5 + .../modules/gatk4/getpileupsummaries/test.yml | 4 +- tests/modules/gatk4/haplotypecaller/main.nf | 2 +- .../gatk4/haplotypecaller/nextflow.config | 5 + tests/modules/gatk4/haplotypecaller/test.yml | 6 +- tests/modules/gatk4/indexfeaturefile/main.nf | 2 +- .../gatk4/indexfeaturefile/nextflow.config | 5 + tests/modules/gatk4/indexfeaturefile/test.yml | 8 +- tests/modules/gatk4/intervallisttools/main.nf | 12 +- .../gatk4/intervallisttools/nextflow.config | 9 ++ .../modules/gatk4/intervallisttools/test.yml | 10 +- .../gatk4/learnreadorientationmodel/main.nf | 2 +- .../learnreadorientationmodel/nextflow.config | 9 ++ .../gatk4/learnreadorientationmodel/test.yml | 2 +- tests/modules/gatk4/markduplicates/main.nf | 2 +- 
.../gatk4/markduplicates/nextflow.config | 5 + tests/modules/gatk4/markduplicates/test.yml | 4 +- tests/modules/gatk4/mergebamalignment/main.nf | 2 +- .../gatk4/mergebamalignment/nextflow.config | 5 + .../modules/gatk4/mergebamalignment/test.yml | 2 +- tests/modules/gatk4/mergevcfs/main.nf | 2 +- tests/modules/gatk4/mergevcfs/nextflow.config | 5 + tests/modules/gatk4/mergevcfs/test.yml | 4 +- tests/modules/gatk4/mutect2/main.nf | 4 +- tests/modules/gatk4/mutect2/nextflow.config | 9 ++ tests/modules/gatk4/mutect2/test.yml | 10 +- tests/modules/gatk4/revertsam/main.nf | 2 +- tests/modules/gatk4/revertsam/nextflow.config | 5 + tests/modules/gatk4/revertsam/test.yml | 2 +- tests/modules/gatk4/samtofastq/main.nf | 2 +- .../modules/gatk4/samtofastq/nextflow.config | 5 + tests/modules/gatk4/samtofastq/test.yml | 4 +- tests/modules/gatk4/splitncigarreads/main.nf | 2 +- .../gatk4/splitncigarreads/nextflow.config | 5 + tests/modules/gatk4/splitncigarreads/test.yml | 2 +- tests/modules/gatk4/variantfiltration/main.nf | 35 ++--- .../gatk4/variantfiltration/nextflow.config | 10 ++ .../modules/gatk4/variantfiltration/test.yml | 4 +- tests/modules/genmap/index/main.nf | 2 +- tests/modules/genmap/index/nextflow.config | 5 + tests/modules/genmap/index/test.yml | 2 +- tests/modules/genmap/mappability/main.nf | 4 +- .../genmap/mappability/nextflow.config | 9 ++ tests/modules/genmap/mappability/test.yml | 2 +- tests/modules/genrich/main.nf | 8 +- tests/modules/genrich/nextflow.config | 21 +++ tests/modules/genrich/test.yml | 10 +- tests/modules/gffread/main.nf | 2 +- tests/modules/gffread/nextflow.config | 9 ++ tests/modules/gffread/test.yml | 2 +- tests/modules/glnexus/main.nf | 14 +- tests/modules/glnexus/nextflow.config | 5 + tests/modules/glnexus/test.yml | 4 +- tests/modules/graphmap2/align/main.nf | 4 +- tests/modules/graphmap2/align/nextflow.config | 5 + tests/modules/graphmap2/align/test.yml | 2 +- tests/modules/graphmap2/index/main.nf | 2 +- 
tests/modules/graphmap2/index/nextflow.config | 5 + tests/modules/graphmap2/index/test.yml | 2 +- tests/modules/gstama/collapse/main.nf | 2 +- tests/modules/gstama/collapse/nextflow.config | 10 ++ tests/modules/gstama/collapse/test.yml | 2 +- tests/modules/gstama/merge/main.nf | 2 +- tests/modules/gstama/merge/nextflow.config | 9 ++ tests/modules/gstama/merge/test.yml | 2 +- tests/modules/gtdbtk/classifywf/main.nf | 2 +- .../modules/gtdbtk/classifywf/nextflow.config | 5 + tests/modules/gtdbtk/classifywf/test.yml | 2 +- tests/modules/gubbins/main.nf | 2 +- tests/modules/gubbins/nextflow.config | 5 + tests/modules/gubbins/test.yml | 2 +- tests/modules/gunc/downloaddb/main.nf | 2 +- tests/modules/gunc/downloaddb/nextflow.config | 5 + tests/modules/gunc/downloaddb/test.yml | 2 +- tests/modules/gunc/run/main.nf | 14 +- tests/modules/gunc/run/nextflow.config | 5 + tests/modules/gunc/run/test.yml | 2 +- tests/modules/gunzip/main.nf | 2 +- tests/modules/gunzip/nextflow.config | 5 + tests/modules/gunzip/test.yml | 2 +- tests/modules/hicap/main.nf | 11 +- tests/modules/hicap/nextflow.config | 5 + tests/modules/hicap/test.yml | 8 +- tests/modules/hifiasm/main.nf | 2 +- tests/modules/hifiasm/nextflow.config | 9 ++ tests/modules/hifiasm/test.yml | 4 +- tests/modules/hisat2/align/main.nf | 26 ++-- tests/modules/hisat2/align/nextflow.config | 5 + tests/modules/hisat2/align/test.yml | 36 ++--- tests/modules/hisat2/build_test/main.nf | 4 +- .../modules/hisat2/build_test/nextflow.config | 5 + tests/modules/hisat2/build_test/test.yml | 18 +-- .../modules/hisat2/extractsplicesites/main.nf | 2 +- .../hisat2/extractsplicesites/nextflow.config | 5 + .../hisat2/extractsplicesites/test.yml | 2 +- tests/modules/hmmcopy/gccounter/main.nf | 2 +- .../modules/hmmcopy/gccounter/nextflow.config | 5 + tests/modules/hmmcopy/gccounter/test.yml | 2 +- tests/modules/hmmcopy/readcounter/main.nf | 2 +- .../hmmcopy/readcounter/nextflow.config | 5 + tests/modules/hmmcopy/readcounter/test.yml | 2 +- 
tests/modules/hmmer/hmmalign/main.nf | 2 +- tests/modules/hmmer/hmmalign/nextflow.config | 5 + tests/modules/hmmer/hmmalign/test.yml | 2 +- tests/modules/homer/annotatepeaks/main.nf | 2 +- .../homer/annotatepeaks/nextflow.config | 5 + tests/modules/homer/annotatepeaks/test.yml | 2 +- tests/modules/homer/findpeaks/main.nf | 4 +- tests/modules/homer/findpeaks/nextflow.config | 13 ++ tests/modules/homer/findpeaks/test.yml | 2 +- tests/modules/homer/maketagdirectory/main.nf | 2 +- .../homer/maketagdirectory/nextflow.config | 9 ++ tests/modules/homer/maketagdirectory/test.yml | 4 +- tests/modules/homer/makeucscfile/main.nf | 4 +- .../homer/makeucscfile/nextflow.config | 9 ++ tests/modules/homer/makeucscfile/test.yml | 2 +- tests/modules/idr/main.nf | 2 +- tests/modules/idr/nextflow.config | 5 + tests/modules/idr/test.yml | 6 +- tests/modules/imputeme/vcftoprs/main.nf | 2 +- .../modules/imputeme/vcftoprs/nextflow.config | 5 + tests/modules/imputeme/vcftoprs/test.yml | 2 +- tests/modules/iqtree/main.nf | 2 +- tests/modules/iqtree/nextflow.config | 5 + tests/modules/iqtree/test.yml | 2 +- tests/modules/ismapper/main.nf | 2 +- tests/modules/ismapper/nextflow.config | 5 + tests/modules/ismapper/test.yml | 2 +- tests/modules/isoseq3/cluster/main.nf | 2 +- tests/modules/isoseq3/cluster/nextflow.config | 9 ++ tests/modules/isoseq3/cluster/test.yml | 2 +- tests/modules/isoseq3/refine/main.nf | 2 +- tests/modules/isoseq3/refine/nextflow.config | 9 ++ tests/modules/isoseq3/refine/test.yml | 2 +- tests/modules/ivar/consensus/main.nf | 2 +- tests/modules/ivar/consensus/nextflow.config | 9 ++ tests/modules/ivar/consensus/test.yml | 2 +- tests/modules/ivar/trim/main.nf | 2 +- tests/modules/ivar/trim/nextflow.config | 5 + tests/modules/ivar/trim/test.yml | 2 +- tests/modules/ivar/variants/main.nf | 2 +- tests/modules/ivar/variants/nextflow.config | 5 + tests/modules/ivar/variants/test.yml | 6 +- tests/modules/jupyternotebook/main.nf | 12 +- tests/modules/jupyternotebook/nextflow.config 
| 19 +++ tests/modules/jupyternotebook/test.yml | 6 +- tests/modules/kallisto/index/main.nf | 2 +- tests/modules/kallisto/index/nextflow.config | 5 + tests/modules/kallisto/index/test.yml | 2 +- tests/modules/kallistobustools/count/main.nf | 2 +- .../kallistobustools/count/nextflow.config | 9 ++ tests/modules/kallistobustools/count/test.yml | 2 +- tests/modules/kallistobustools/ref/main.nf | 2 +- .../kallistobustools/ref/nextflow.config | 5 + tests/modules/kallistobustools/ref/test.yml | 6 +- tests/modules/khmer/normalizebymedian/main.nf | 6 +- .../khmer/normalizebymedian/nextflow.config | 9 ++ .../modules/khmer/normalizebymedian/test.yml | 10 +- tests/modules/kleborate/main.nf | 2 +- tests/modules/kleborate/nextflow.config | 5 + tests/modules/kleborate/test.yml | 2 +- tests/modules/kraken2/kraken2/main.nf | 4 +- tests/modules/kraken2/kraken2/nextflow.config | 5 + tests/modules/kraken2/kraken2/test.yml | 4 +- tests/modules/krona/kronadb/main.nf | 9 ++ tests/modules/krona/kronadb/nextflow.config | 5 + tests/modules/krona/kronadb/test.yml | 7 + tests/modules/krona/ktimporttaxonomy/main.nf | 16 +++ .../krona/ktimporttaxonomy/nextflow.config | 5 + tests/modules/krona/ktimporttaxonomy/test.yml | 9 ++ tests/modules/kronatools/kronadb/main.nf | 9 -- tests/modules/kronatools/kronadb/test.yml | 7 - .../kronatools/ktimporttaxonomy/main.nf | 15 -- .../kronatools/ktimporttaxonomy/test.yml | 9 -- tests/modules/last/dotplot/main.nf | 2 +- tests/modules/last/dotplot/nextflow.config | 5 + tests/modules/last/dotplot/test.yml | 2 +- tests/modules/last/lastal/main.nf | 4 +- tests/modules/last/lastal/nextflow.config | 5 + tests/modules/last/lastal/test.yml | 4 +- tests/modules/last/lastdb/main.nf | 2 +- tests/modules/last/lastdb/nextflow.config | 9 ++ tests/modules/last/lastdb/test.yml | 4 +- tests/modules/last/mafconvert/main.nf | 2 +- tests/modules/last/mafconvert/nextflow.config | 5 + tests/modules/last/mafconvert/test.yml | 2 +- tests/modules/last/mafswap/main.nf | 2 +- 
tests/modules/last/mafswap/nextflow.config | 5 + tests/modules/last/mafswap/test.yml | 2 +- tests/modules/last/postmask/main.nf | 2 +- tests/modules/last/postmask/nextflow.config | 9 ++ tests/modules/last/postmask/test.yml | 2 +- tests/modules/last/split/main.nf | 2 +- tests/modules/last/split/nextflow.config | 9 ++ tests/modules/last/split/test.yml | 2 +- tests/modules/last/train/main.nf | 4 +- tests/modules/last/train/nextflow.config | 5 + tests/modules/last/train/test.yml | 2 +- tests/modules/leehom/main.nf | 4 +- tests/modules/leehom/nextflow.config | 9 ++ tests/modules/leehom/test.yml | 10 +- tests/modules/lima/main.nf | 2 +- tests/modules/lima/nextflow.config | 10 ++ tests/modules/lima/test.yml | 10 +- tests/modules/lissero/main.nf | 2 +- tests/modules/lissero/nextflow.config | 5 + tests/modules/lissero/test.yml | 2 +- tests/modules/lofreq/call/main.nf | 2 +- tests/modules/lofreq/call/nextflow.config | 5 + tests/modules/lofreq/call/test.yml | 2 +- tests/modules/lofreq/callparallel/main.nf | 2 +- .../lofreq/callparallel/nextflow.config | 5 + tests/modules/lofreq/callparallel/test.yml | 2 +- tests/modules/lofreq/filter/main.nf | 2 +- tests/modules/lofreq/filter/nextflow.config | 5 + tests/modules/lofreq/filter/test.yml | 2 +- tests/modules/lofreq/indelqual/main.nf | 2 +- .../modules/lofreq/indelqual/nextflow.config | 10 ++ tests/modules/lofreq/indelqual/test.yml | 2 +- tests/modules/macs2/callpeak/main.nf | 6 +- tests/modules/macs2/callpeak/nextflow.config | 17 +++ tests/modules/macs2/callpeak/test.yml | 6 +- tests/modules/malt/build_test/main.nf | 4 +- tests/modules/malt/build_test/nextflow.config | 5 + tests/modules/malt/build_test/test.yml | 4 +- tests/modules/malt/run/main.nf | 6 +- tests/modules/malt/run/nextflow.config | 5 + tests/modules/malt/run/test.yml | 2 +- tests/modules/maltextract/main.nf | 10 +- tests/modules/maltextract/nextflow.config | 5 + tests/modules/maltextract/test.yml | 2 +- tests/modules/manta/germline/main.nf | 2 +- 
tests/modules/manta/germline/nextflow.config | 5 + tests/modules/manta/germline/test.yml | 4 +- tests/modules/manta/somatic/main.nf | 2 +- tests/modules/manta/somatic/nextflow.config | 5 + tests/modules/manta/somatic/test.yml | 2 +- tests/modules/manta/tumoronly/main.nf | 2 +- tests/modules/manta/tumoronly/nextflow.config | 5 + tests/modules/manta/tumoronly/test.yml | 4 +- tests/modules/mapdamage2/main.nf | 2 +- tests/modules/mapdamage2/nextflow.config | 5 + tests/modules/mapdamage2/test.yml | 2 +- tests/modules/mash/sketch/main.nf | 2 +- tests/modules/mash/sketch/nextflow.config | 5 + tests/modules/mash/sketch/test.yml | 2 +- tests/modules/mashtree/main.nf | 2 +- tests/modules/mashtree/nextflow.config | 5 + tests/modules/mashtree/test.yml | 2 +- tests/modules/maxbin2/main.nf | 2 +- tests/modules/maxbin2/nextflow.config | 5 + tests/modules/maxbin2/test.yml | 2 +- tests/modules/medaka/main.nf | 2 +- tests/modules/medaka/nextflow.config | 9 ++ tests/modules/medaka/test.yml | 2 +- tests/modules/megahit/main.nf | 2 +- tests/modules/megahit/nextflow.config | 5 + tests/modules/megahit/test.yml | 4 +- tests/modules/meningotype/main.nf | 2 +- tests/modules/meningotype/nextflow.config | 5 + tests/modules/meningotype/test.yml | 2 +- .../jgisummarizebamcontigdepths/main.nf | 2 +- .../nextflow.config | 5 + .../jgisummarizebamcontigdepths/test.yml | 2 +- tests/modules/metabat2/metabat2/main.nf | 4 +- .../modules/metabat2/metabat2/nextflow.config | 9 ++ tests/modules/metabat2/metabat2/test.yml | 4 +- tests/modules/metaphlan3/main.nf | 8 +- tests/modules/metaphlan3/nextflow.config | 13 ++ tests/modules/metaphlan3/test.yml | 8 +- tests/modules/methyldackel/extract/main.nf | 2 +- .../methyldackel/extract/nextflow.config | 5 + tests/modules/methyldackel/extract/test.yml | 2 +- tests/modules/methyldackel/mbias/main.nf | 2 +- .../methyldackel/mbias/nextflow.config | 5 + tests/modules/methyldackel/mbias/test.yml | 2 +- tests/modules/minia/main.nf | 2 +- 
tests/modules/minia/nextflow.config | 5 + tests/modules/minia/test.yml | 2 +- tests/modules/miniasm/main.nf | 2 +- tests/modules/miniasm/nextflow.config | 9 ++ tests/modules/miniasm/test.yml | 2 +- tests/modules/minimap2/align/main.nf | 2 +- tests/modules/minimap2/align/nextflow.config | 5 + tests/modules/minimap2/align/test.yml | 4 +- tests/modules/minimap2/index/main.nf | 2 +- tests/modules/minimap2/index/nextflow.config | 5 + tests/modules/minimap2/index/test.yml | 2 +- tests/modules/mlst/main.nf | 2 +- tests/modules/mlst/nextflow.config | 5 + tests/modules/mlst/test.yml | 2 +- tests/modules/mosdepth/main.nf | 2 +- tests/modules/mosdepth/nextflow.config | 5 + tests/modules/mosdepth/test.yml | 2 +- tests/modules/msisensor/msi/main.nf | 4 +- tests/modules/msisensor/msi/nextflow.config | 5 + tests/modules/msisensor/msi/test.yml | 2 +- tests/modules/msisensor/scan/main.nf | 2 +- tests/modules/msisensor/scan/nextflow.config | 5 + tests/modules/msisensor/scan/test.yml | 2 +- tests/modules/mtnucratio/main.nf | 2 +- tests/modules/mtnucratio/nextflow.config | 5 + tests/modules/mtnucratio/test.yml | 2 +- tests/modules/multiqc/main.nf | 4 +- tests/modules/multiqc/nextflow.config | 5 + tests/modules/multiqc/test.yml | 2 +- tests/modules/mummer/main.nf | 2 +- tests/modules/mummer/nextflow.config | 5 + tests/modules/mummer/test.yml | 2 +- tests/modules/muscle/main.nf | 4 +- tests/modules/muscle/nextflow.config | 13 ++ tests/modules/muscle/test.yml | 2 +- tests/modules/nanolyse/main.nf | 2 +- tests/modules/nanolyse/nextflow.config | 9 ++ tests/modules/nanolyse/test.yml | 2 +- tests/modules/nanoplot/main.nf | 2 +- tests/modules/nanoplot/nextflow.config | 5 + tests/modules/nanoplot/test.yml | 4 +- tests/modules/ncbigenomedownload/main.nf | 2 +- .../ncbigenomedownload/nextflow.config | 8 ++ tests/modules/ncbigenomedownload/test.yml | 2 +- tests/modules/nextclade/main.nf | 2 +- tests/modules/nextclade/nextflow.config | 5 + tests/modules/nextclade/test.yml | 2 +- 
tests/modules/ngmaster/main.nf | 2 +- tests/modules/ngmaster/nextflow.config | 5 + tests/modules/ngmaster/test.yml | 2 +- tests/modules/nucmer/main.nf | 2 +- tests/modules/nucmer/nextflow.config | 5 + tests/modules/nucmer/test.yml | 2 +- tests/modules/optitype/main.nf | 2 +- tests/modules/optitype/nextflow.config | 10 ++ tests/modules/optitype/test.yml | 2 +- tests/modules/pairix/main.nf | 2 +- tests/modules/pairix/nextflow.config | 5 + tests/modules/pairix/test.yml | 2 +- tests/modules/pairtools/dedup/main.nf | 2 +- tests/modules/pairtools/dedup/nextflow.config | 9 ++ tests/modules/pairtools/dedup/test.yml | 2 +- tests/modules/pairtools/flip/main.nf | 2 +- tests/modules/pairtools/flip/nextflow.config | 5 + tests/modules/pairtools/flip/test.yml | 2 +- tests/modules/pairtools/parse/main.nf | 2 +- tests/modules/pairtools/parse/nextflow.config | 9 ++ tests/modules/pairtools/parse/test.yml | 2 +- tests/modules/pairtools/restrict/main.nf | 2 +- .../pairtools/restrict/nextflow.config | 9 ++ tests/modules/pairtools/restrict/test.yml | 2 +- tests/modules/pairtools/select/main.nf | 2 +- .../modules/pairtools/select/nextflow.config | 9 ++ tests/modules/pairtools/select/test.yml | 2 +- tests/modules/pairtools/sort/main.nf | 2 +- tests/modules/pairtools/sort/nextflow.config | 9 ++ tests/modules/pairtools/sort/test.yml | 2 +- tests/modules/pangolin/main.nf | 2 +- tests/modules/pangolin/nextflow.config | 5 + tests/modules/pangolin/test.yml | 2 +- tests/modules/paraclu/main.nf | 2 +- tests/modules/paraclu/nextflow.config | 5 + tests/modules/paraclu/test.yml | 2 +- tests/modules/pbbam/pbmerge/main.nf | 2 +- tests/modules/pbbam/pbmerge/nextflow.config | 9 ++ tests/modules/pbbam/pbmerge/test.yml | 2 +- tests/modules/pbccs/main.nf | 2 +- tests/modules/pbccs/nextflow.config | 9 ++ tests/modules/pbccs/test.yml | 2 +- tests/modules/peddy/main.nf | 4 +- tests/modules/peddy/nextflow.config | 5 + tests/modules/peddy/test.yml | 2 +- tests/modules/phyloflash/main.nf | 12 +- 
tests/modules/phyloflash/nextflow.config | 5 + tests/modules/phyloflash/test.yml | 4 +- tests/modules/picard/collecthsmetrics/main.nf | 2 +- .../picard/collecthsmetrics/nextflow.config | 5 + .../modules/picard/collecthsmetrics/test.yml | 2 +- .../picard/collectmultiplemetrics/main.nf | 2 +- .../collectmultiplemetrics/nextflow.config | 5 + .../picard/collectmultiplemetrics/test.yml | 2 +- .../modules/picard/collectwgsmetrics/main.nf | 2 +- .../picard/collectwgsmetrics/nextflow.config | 5 + .../modules/picard/collectwgsmetrics/test.yml | 2 +- tests/modules/picard/filtersamreads/main.nf | 4 +- .../picard/filtersamreads/nextflow.config | 13 ++ tests/modules/picard/filtersamreads/test.yml | 4 +- tests/modules/picard/markduplicates/main.nf | 4 +- .../picard/markduplicates/nextflow.config | 9 ++ tests/modules/picard/markduplicates/test.yml | 4 +- tests/modules/picard/mergesamfiles/main.nf | 2 +- .../picard/mergesamfiles/nextflow.config | 5 + tests/modules/picard/mergesamfiles/test.yml | 2 +- tests/modules/picard/sortsam/main.nf | 2 +- tests/modules/picard/sortsam/nextflow.config | 9 ++ tests/modules/picard/sortsam/test.yml | 2 +- tests/modules/pirate/main.nf | 19 ++- tests/modules/pirate/nextflow.config | 5 + tests/modules/pirate/test.yml | 2 +- tests/modules/plasmidid/main.nf | 2 +- tests/modules/plasmidid/nextflow.config | 9 ++ tests/modules/plasmidid/test.yml | 2 +- tests/modules/plink/extract/main.nf | 4 +- tests/modules/plink/extract/nextflow.config | 13 ++ tests/modules/plink/extract/test.yml | 2 +- tests/modules/plink/vcf/main.nf | 2 +- tests/modules/plink/vcf/nextflow.config | 9 ++ tests/modules/plink/vcf/test.yml | 2 +- tests/modules/plink2/vcf/main.nf | 2 +- tests/modules/plink2/vcf/nextflow.config | 9 ++ tests/modules/plink2/vcf/test.yml | 2 +- tests/modules/pmdtools/filter/main.nf | 2 +- tests/modules/pmdtools/filter/nextflow.config | 5 + tests/modules/pmdtools/filter/test.yml | 2 +- tests/modules/porechop/main.nf | 2 +- tests/modules/porechop/nextflow.config 
| 10 ++ tests/modules/porechop/test.yml | 2 +- tests/modules/preseq/lcextrap/main.nf | 2 +- tests/modules/preseq/lcextrap/nextflow.config | 5 + tests/modules/preseq/lcextrap/test.yml | 4 +- tests/modules/prodigal/main.nf | 2 +- tests/modules/prodigal/nextflow.config | 5 + tests/modules/prodigal/test.yml | 2 +- tests/modules/prokka/main.nf | 2 +- tests/modules/prokka/nextflow.config | 5 + tests/modules/prokka/test.yml | 2 +- tests/modules/pycoqc/main.nf | 2 +- tests/modules/pycoqc/nextflow.config | 9 ++ tests/modules/pycoqc/test.yml | 2 +- tests/modules/pydamage/analyze/main.nf | 2 +- .../modules/pydamage/analyze/nextflow.config | 5 + tests/modules/pydamage/analyze/test.yml | 2 +- tests/modules/pydamage/filter/main.nf | 4 +- tests/modules/pydamage/filter/nextflow.config | 5 + tests/modules/pydamage/filter/test.yml | 4 +- tests/modules/qcat/main.nf | 2 +- tests/modules/qcat/nextflow.config | 5 + tests/modules/qcat/test.yml | 2 +- tests/modules/qualimap/bamqc/main.nf | 2 +- tests/modules/qualimap/bamqc/nextflow.config | 5 + tests/modules/qualimap/bamqc/test.yml | 2 +- tests/modules/quast/main.nf | 2 +- tests/modules/quast/nextflow.config | 5 + tests/modules/quast/test.yml | 4 +- tests/modules/racon/main.nf | 2 +- tests/modules/racon/nextflow.config | 5 + tests/modules/racon/test.yml | 2 +- tests/modules/rapidnj/main.nf | 2 +- tests/modules/rapidnj/nextflow.config | 5 + tests/modules/rapidnj/test.yml | 2 +- tests/modules/rasusa/main.nf | 2 +- tests/modules/rasusa/nextflow.config | 9 ++ tests/modules/rasusa/test.yml | 2 +- tests/modules/raxmlng/main.nf | 4 +- tests/modules/raxmlng/nextflow.config | 13 ++ tests/modules/raxmlng/test.yml | 4 +- tests/modules/rmarkdownnotebook/main.nf | 8 +- .../modules/rmarkdownnotebook/nextflow.config | 15 ++ tests/modules/rmarkdownnotebook/test.yml | 4 +- tests/modules/roary/main.nf | 15 +- tests/modules/roary/nextflow.config | 5 + tests/modules/roary/test.yml | 8 +- .../modules/rsem/calculateexpression/main.nf | 4 +- 
.../rsem/calculateexpression/nextflow.config | 13 ++ .../modules/rsem/calculateexpression/test.yml | 50 +++---- tests/modules/rsem/preparereference/main.nf | 2 +- .../rsem/preparereference/nextflow.config | 5 + tests/modules/rsem/preparereference/test.yml | 2 +- tests/modules/rseqc/bamstat/main.nf | 2 +- tests/modules/rseqc/bamstat/nextflow.config | 5 + tests/modules/rseqc/bamstat/test.yml | 2 +- tests/modules/rseqc/inferexperiment/main.nf | 2 +- .../rseqc/inferexperiment/nextflow.config | 5 + tests/modules/rseqc/inferexperiment/test.yml | 2 +- tests/modules/rseqc/innerdistance/main.nf | 2 +- .../rseqc/innerdistance/nextflow.config | 5 + tests/modules/rseqc/innerdistance/test.yml | 2 +- .../modules/rseqc/junctionannotation/main.nf | 2 +- .../rseqc/junctionannotation/nextflow.config | 5 + .../modules/rseqc/junctionannotation/test.yml | 2 +- .../modules/rseqc/junctionsaturation/main.nf | 2 +- .../rseqc/junctionsaturation/nextflow.config | 5 + .../modules/rseqc/junctionsaturation/test.yml | 2 +- tests/modules/rseqc/readdistribution/main.nf | 2 +- .../rseqc/readdistribution/nextflow.config | 5 + tests/modules/rseqc/readdistribution/test.yml | 2 +- tests/modules/rseqc/readduplication/main.nf | 2 +- .../rseqc/readduplication/nextflow.config | 5 + tests/modules/rseqc/readduplication/test.yml | 2 +- tests/modules/salmon/index/main.nf | 2 +- tests/modules/salmon/index/nextflow.config | 5 + tests/modules/salmon/index/test.yml | 2 +- tests/modules/salmon/quant/main.nf | 33 +++-- tests/modules/salmon/quant/nextflow.config | 9 ++ tests/modules/salmon/quant/test.yml | 92 ++++++------ tests/modules/samblaster/main.nf | 2 +- tests/modules/samblaster/nextflow.config | 10 ++ tests/modules/samblaster/test.yml | 2 +- tests/modules/samtools/ampliconclip/main.nf | 2 +- .../samtools/ampliconclip/nextflow.config | 5 + tests/modules/samtools/ampliconclip/test.yml | 8 +- tests/modules/samtools/bam2fq/main.nf | 2 +- tests/modules/samtools/bam2fq/nextflow.config | 9 ++ 
tests/modules/samtools/bam2fq/test.yml | 4 +- tests/modules/samtools/depth/main.nf | 2 +- tests/modules/samtools/depth/nextflow.config | 5 + tests/modules/samtools/depth/test.yml | 2 +- tests/modules/samtools/faidx/main.nf | 2 +- tests/modules/samtools/faidx/nextflow.config | 5 + tests/modules/samtools/faidx/test.yml | 2 +- tests/modules/samtools/fastq/main.nf | 2 +- tests/modules/samtools/fastq/nextflow.config | 5 + tests/modules/samtools/fastq/test.yml | 2 +- tests/modules/samtools/fixmate/main.nf | 2 +- .../modules/samtools/fixmate/nextflow.config | 9 ++ tests/modules/samtools/fixmate/test.yml | 2 +- tests/modules/samtools/flagstat/main.nf | 11 +- .../modules/samtools/flagstat/nextflow.config | 5 + tests/modules/samtools/flagstat/test.yml | 2 +- tests/modules/samtools/idxstats/main.nf | 2 +- .../modules/samtools/idxstats/nextflow.config | 5 + tests/modules/samtools/idxstats/test.yml | 2 +- tests/modules/samtools/index/main.nf | 6 +- tests/modules/samtools/index/nextflow.config | 9 ++ tests/modules/samtools/index/test.yml | 6 +- tests/modules/samtools/merge/main.nf | 2 +- tests/modules/samtools/merge/nextflow.config | 9 ++ tests/modules/samtools/merge/test.yml | 4 +- tests/modules/samtools/mpileup/main.nf | 2 +- .../modules/samtools/mpileup/nextflow.config | 5 + tests/modules/samtools/mpileup/test.yml | 2 +- tests/modules/samtools/sort/main.nf | 2 +- tests/modules/samtools/sort/nextflow.config | 9 ++ tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/main.nf | 2 +- tests/modules/samtools/stats/nextflow.config | 5 + tests/modules/samtools/stats/test.yml | 4 +- tests/modules/samtools/view/main.nf | 2 +- tests/modules/samtools/view/nextflow.config | 5 + tests/modules/samtools/view/test.yml | 4 +- tests/modules/scoary/main.nf | 2 +- tests/modules/scoary/nextflow.config | 5 + tests/modules/scoary/test.yml | 2 +- tests/modules/seacr/callpeak/main.nf | 2 +- tests/modules/seacr/callpeak/nextflow.config | 9 ++ tests/modules/seacr/callpeak/test.yml | 
4 +- tests/modules/seqkit/split2/main.nf | 6 +- tests/modules/seqkit/split2/nextflow.config | 17 +++ tests/modules/seqkit/split2/test.yml | 12 +- tests/modules/seqsero2/main.nf | 2 +- tests/modules/seqsero2/nextflow.config | 9 ++ tests/modules/seqsero2/test.yml | 2 +- tests/modules/seqtk/mergepe/main.nf | 2 +- tests/modules/seqtk/mergepe/nextflow.config | 9 ++ tests/modules/seqtk/mergepe/test.yml | 4 +- tests/modules/seqtk/sample/main.nf | 2 +- tests/modules/seqtk/sample/nextflow.config | 10 ++ tests/modules/seqtk/sample/test.yml | 4 +- tests/modules/seqtk/subseq/main.nf | 2 +- tests/modules/seqtk/subseq/nextflow.config | 9 ++ tests/modules/seqtk/subseq/test.yml | 2 +- tests/modules/sequenzautils/bam2seqz/main.nf | 2 +- .../sequenzautils/bam2seqz/nextflow.config | 5 + tests/modules/sequenzautils/bam2seqz/test.yml | 2 +- tests/modules/sequenzautils/gcwiggle/main.nf | 2 +- .../sequenzautils/gcwiggle/nextflow.config | 9 ++ tests/modules/sequenzautils/gcwiggle/test.yml | 4 +- tests/modules/seqwish/induce/main.nf | 2 +- tests/modules/seqwish/induce/nextflow.config | 5 + tests/modules/seqwish/induce/test.yml | 2 +- tests/modules/shovill/main.nf | 8 +- tests/modules/shovill/nextflow.config | 21 +++ tests/modules/shovill/test.yml | 8 +- tests/modules/snpdists/main.nf | 2 +- tests/modules/snpdists/nextflow.config | 5 + tests/modules/snpdists/test.yml | 2 +- tests/modules/snpeff/main.nf | 10 +- tests/modules/snpeff/nextflow.config | 10 ++ tests/modules/snpeff/test.yml | 2 +- tests/modules/snpsites/main.nf | 2 +- tests/modules/snpsites/nextflow.config | 5 + tests/modules/snpsites/test.yml | 2 +- tests/modules/spades/main.nf | 2 +- tests/modules/spades/nextflow.config | 9 ++ tests/modules/spades/test.yml | 8 +- tests/modules/spatyper/main.nf | 4 +- tests/modules/spatyper/nextflow.config | 9 ++ tests/modules/spatyper/test.yml | 4 +- tests/modules/sratools/fasterqdump/main.nf | 2 +- .../sratools/fasterqdump/nextflow.config | 5 + tests/modules/sratools/fasterqdump/test.yml | 10 
+- tests/modules/sratools/prefetch/main.nf | 2 +- .../modules/sratools/prefetch/nextflow.config | 5 + tests/modules/sratools/prefetch/test.yml | 2 +- tests/modules/staphopiasccmec/main.nf | 4 +- tests/modules/staphopiasccmec/nextflow.config | 9 ++ tests/modules/staphopiasccmec/test.yml | 4 +- tests/modules/star/align/main.nf | 72 +++++++--- tests/modules/star/align/nextflow.config | 21 +++ tests/modules/star/align/test.yml | 136 +++++++++--------- tests/modules/star/genomegenerate/main.nf | 2 +- .../star/genomegenerate/nextflow.config | 5 + tests/modules/star/genomegenerate/test.yml | 2 +- tests/modules/strelka/germline/main.nf | 2 +- .../modules/strelka/germline/nextflow.config | 5 + tests/modules/strelka/germline/test.yml | 4 +- tests/modules/strelka/somatic/main.nf | 2 +- tests/modules/strelka/somatic/nextflow.config | 5 + tests/modules/strelka/somatic/test.yml | 4 +- tests/modules/stringtie/merge/main.nf | 4 +- tests/modules/stringtie/merge/nextflow.config | 5 + tests/modules/stringtie/merge/test.yml | 4 +- tests/modules/stringtie/stringtie/main.nf | 2 +- .../stringtie/stringtie/nextflow.config | 5 + tests/modules/stringtie/stringtie/test.yml | 4 +- tests/modules/subread/featurecounts/main.nf | 2 +- .../subread/featurecounts/nextflow.config | 9 ++ tests/modules/subread/featurecounts/test.yml | 6 +- tests/modules/tabix/bgzip/main.nf | 2 +- tests/modules/tabix/bgzip/nextflow.config | 5 + tests/modules/tabix/bgzip/test.yml | 2 +- tests/modules/tabix/bgziptabix/main.nf | 2 +- .../modules/tabix/bgziptabix/nextflow.config | 9 ++ tests/modules/tabix/bgziptabix/test.yml | 2 +- tests/modules/tabix/tabix/main.nf | 6 +- tests/modules/tabix/tabix/nextflow.config | 17 +++ tests/modules/tabix/tabix/test.yml | 6 +- tests/modules/tbprofiler/profile/main.nf | 28 ++-- .../tbprofiler/profile/nextflow.config | 13 ++ tests/modules/tbprofiler/profile/test.yml | 4 +- tests/modules/tiddit/cov/main.nf | 2 +- tests/modules/tiddit/cov/nextflow.config | 5 + 
tests/modules/tiddit/cov/test.yml | 4 +- tests/modules/tiddit/sv/main.nf | 2 +- tests/modules/tiddit/sv/nextflow.config | 5 + tests/modules/tiddit/sv/test.yml | 4 +- tests/modules/trimgalore/main.nf | 2 +- tests/modules/trimgalore/nextflow.config | 5 + tests/modules/trimgalore/test.yml | 4 +- tests/modules/ucsc/bed12tobigbed/main.nf | 2 +- .../ucsc/bed12tobigbed/nextflow.config | 5 + tests/modules/ucsc/bed12tobigbed/test.yml | 2 +- tests/modules/ucsc/bedclip/main.nf | 2 +- tests/modules/ucsc/bedclip/nextflow.config | 9 ++ tests/modules/ucsc/bedclip/test.yml | 2 +- tests/modules/ucsc/bedgraphtobigwig/main.nf | 2 +- .../ucsc/bedgraphtobigwig/nextflow.config | 5 + tests/modules/ucsc/bedgraphtobigwig/test.yml | 2 +- .../modules/ucsc/bigwigaverageoverbed/main.nf | 2 +- .../ucsc/bigwigaverageoverbed/nextflow.config | 5 + .../ucsc/bigwigaverageoverbed/test.yml | 2 +- tests/modules/ucsc/liftover/main.nf | 2 +- tests/modules/ucsc/liftover/nextflow.config | 5 + tests/modules/ucsc/liftover/test.yml | 2 +- tests/modules/ucsc/wigtobigwig/main.nf | 2 +- .../modules/ucsc/wigtobigwig/nextflow.config | 5 + tests/modules/ucsc/wigtobigwig/test.yml | 2 +- tests/modules/ultra/pipeline/main.nf | 6 +- tests/modules/ultra/pipeline/nextflow.config | 10 ++ tests/modules/ultra/pipeline/test.yml | 2 +- tests/modules/unicycler/main.nf | 2 +- tests/modules/unicycler/nextflow.config | 5 + tests/modules/unicycler/test.yml | 6 +- tests/modules/untar/main.nf | 2 +- tests/modules/untar/nextflow.config | 5 + tests/modules/untar/test.yml | 2 +- tests/modules/unzip/main.nf | 2 +- tests/modules/unzip/nextflow.config | 5 + tests/modules/unzip/test.yml | 2 +- tests/modules/variantbam/main.nf | 2 +- tests/modules/variantbam/nextflow.config | 9 ++ tests/modules/variantbam/test.yml | 2 +- tests/modules/vcftools/main.nf | 4 +- tests/modules/vcftools/nextflow.config | 13 ++ tests/modules/vcftools/test.yml | 8 +- tests/modules/yara/index/main.nf | 2 +- tests/modules/yara/index/nextflow.config | 5 + 
tests/modules/yara/index/test.yml | 2 +- tests/modules/yara/mapper/main.nf | 24 ++-- tests/modules/yara/mapper/nextflow.config | 13 ++ tests/modules/yara/mapper/test.yml | 56 ++++---- .../nf-core/align_bowtie2/test.yml | 14 +- .../nf-core/bam_sort_samtools/test.yml | 22 +-- .../nf-core/bam_stats_samtools/test.yml | 24 ++-- .../nf-core/gatk_create_som_pon/test.yml | 5 +- .../test.yml | 3 +- .../test.yml | 3 +- tests/subworkflows/nf-core/sra_fastq/test.yml | 18 +-- 1803 files changed, 7649 insertions(+), 36540 deletions(-) delete mode 100644 modules/abacas/functions.nf delete mode 100644 modules/adapterremoval/functions.nf delete mode 100644 modules/agrvate/functions.nf delete mode 100644 modules/allelecounter/functions.nf delete mode 100644 modules/amps/functions.nf delete mode 100644 modules/arriba/functions.nf delete mode 100644 modules/artic/guppyplex/functions.nf delete mode 100644 modules/artic/minion/functions.nf delete mode 100644 modules/assemblyscan/functions.nf delete mode 100644 modules/ataqv/ataqv/functions.nf delete mode 100644 modules/bakta/functions.nf delete mode 100644 modules/bamaligncleaner/functions.nf delete mode 100644 modules/bamtools/split/functions.nf delete mode 100644 modules/bamutil/trimbam/functions.nf delete mode 100644 modules/bandage/image/functions.nf delete mode 100644 modules/bbmap/align/functions.nf delete mode 100644 modules/bbmap/bbduk/functions.nf delete mode 100644 modules/bbmap/bbsplit/functions.nf delete mode 100644 modules/bbmap/index/functions.nf delete mode 100644 modules/bcftools/concat/functions.nf delete mode 100644 modules/bcftools/consensus/functions.nf delete mode 100644 modules/bcftools/filter/functions.nf delete mode 100644 modules/bcftools/index/functions.nf delete mode 100644 modules/bcftools/isec/functions.nf delete mode 100644 modules/bcftools/merge/functions.nf delete mode 100644 modules/bcftools/mpileup/functions.nf delete mode 100644 modules/bcftools/norm/functions.nf delete mode 100644 
modules/bcftools/query/functions.nf delete mode 100644 modules/bcftools/reheader/functions.nf delete mode 100644 modules/bcftools/stats/functions.nf delete mode 100644 modules/bcftools/view/functions.nf delete mode 100644 modules/bedtools/bamtobed/functions.nf delete mode 100644 modules/bedtools/complement/functions.nf delete mode 100644 modules/bedtools/genomecov/functions.nf delete mode 100644 modules/bedtools/getfasta/functions.nf delete mode 100644 modules/bedtools/intersect/functions.nf delete mode 100644 modules/bedtools/makewindows/functions.nf delete mode 100644 modules/bedtools/maskfasta/functions.nf delete mode 100644 modules/bedtools/merge/functions.nf delete mode 100644 modules/bedtools/slop/functions.nf delete mode 100644 modules/bedtools/sort/functions.nf delete mode 100644 modules/bedtools/subtract/functions.nf delete mode 100644 modules/bismark/align/functions.nf delete mode 100644 modules/bismark/deduplicate/functions.nf delete mode 100644 modules/bismark/genomepreparation/functions.nf delete mode 100644 modules/bismark/methylationextractor/functions.nf delete mode 100644 modules/bismark/report/functions.nf delete mode 100644 modules/bismark/summary/functions.nf delete mode 100644 modules/blast/blastn/functions.nf delete mode 100644 modules/blast/makeblastdb/functions.nf delete mode 100644 modules/bowtie/align/functions.nf delete mode 100644 modules/bowtie/build/functions.nf delete mode 100644 modules/bowtie2/align/functions.nf delete mode 100644 modules/bowtie2/build/functions.nf delete mode 100644 modules/bwa/aln/functions.nf delete mode 100644 modules/bwa/index/functions.nf delete mode 100644 modules/bwa/mem/functions.nf delete mode 100644 modules/bwa/sampe/functions.nf delete mode 100644 modules/bwa/samse/functions.nf delete mode 100644 modules/bwamem2/index/functions.nf delete mode 100644 modules/bwamem2/mem/functions.nf delete mode 100644 modules/bwameth/align/functions.nf delete mode 100644 modules/bwameth/index/functions.nf delete mode 
100644 modules/cat/cat/functions.nf delete mode 100644 modules/cat/fastq/functions.nf delete mode 100644 modules/cellranger/mkref/functions.nf delete mode 100644 modules/checkm/lineagewf/functions.nf delete mode 100644 modules/chromap/chromap/functions.nf delete mode 100644 modules/chromap/index/functions.nf delete mode 100644 modules/clonalframeml/functions.nf delete mode 100644 modules/cmseq/polymut/functions.nf delete mode 100755 modules/cnvkit/batch/functions.nf delete mode 100644 modules/cooler/cload/functions.nf delete mode 100644 modules/cooler/digest/functions.nf delete mode 100644 modules/cooler/dump/functions.nf delete mode 100644 modules/cooler/merge/functions.nf delete mode 100644 modules/cooler/zoomify/functions.nf delete mode 100644 modules/csvtk/concat/functions.nf delete mode 100644 modules/csvtk/split/functions.nf delete mode 100644 modules/custom/dumpsoftwareversions/functions.nf create mode 100644 modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py delete mode 100644 modules/custom/getchromsizes/functions.nf delete mode 100644 modules/cutadapt/functions.nf delete mode 100644 modules/damageprofiler/functions.nf delete mode 100644 modules/dastool/dastool/functions.nf delete mode 100644 modules/dastool/scaffolds2bin/functions.nf delete mode 100644 modules/dedup/functions.nf delete mode 100644 modules/deeptools/computematrix/functions.nf delete mode 100644 modules/deeptools/plotfingerprint/functions.nf delete mode 100644 modules/deeptools/plotheatmap/functions.nf delete mode 100644 modules/deeptools/plotprofile/functions.nf delete mode 100644 modules/delly/call/functions.nf delete mode 100644 modules/diamond/blastp/functions.nf delete mode 100644 modules/diamond/blastx/functions.nf delete mode 100644 modules/diamond/makedb/functions.nf delete mode 100644 modules/dragonflye/functions.nf delete mode 100644 modules/dshbio/exportsegments/functions.nf delete mode 100644 modules/dshbio/filterbed/functions.nf delete mode 100644 
modules/dshbio/filtergff3/functions.nf delete mode 100644 modules/dshbio/splitbed/functions.nf delete mode 100644 modules/dshbio/splitgff3/functions.nf delete mode 100644 modules/ectyper/functions.nf delete mode 100644 modules/emmtyper/functions.nf delete mode 100644 modules/ensemblvep/functions.nf delete mode 100644 modules/expansionhunter/functions.nf delete mode 100644 modules/fargene/functions.nf delete mode 100644 modules/fastani/functions.nf delete mode 100644 modules/fastp/functions.nf delete mode 100644 modules/fastqc/functions.nf delete mode 100644 modules/fastqscan/functions.nf delete mode 100644 modules/fasttree/functions.nf delete mode 100644 modules/fgbio/callmolecularconsensusreads/functions.nf delete mode 100644 modules/fgbio/fastqtobam/functions.nf delete mode 100644 modules/fgbio/groupreadsbyumi/functions.nf delete mode 100644 modules/fgbio/sortbam/functions.nf delete mode 100644 modules/filtlong/functions.nf delete mode 100644 modules/flash/functions.nf delete mode 100644 modules/freebayes/functions.nf delete mode 100644 modules/gatk4/applybqsr/functions.nf delete mode 100644 modules/gatk4/baserecalibrator/functions.nf delete mode 100644 modules/gatk4/bedtointervallist/functions.nf delete mode 100644 modules/gatk4/calculatecontamination/functions.nf delete mode 100644 modules/gatk4/createsequencedictionary/functions.nf delete mode 100644 modules/gatk4/createsomaticpanelofnormals/functions.nf delete mode 100644 modules/gatk4/estimatelibrarycomplexity/functions.nf delete mode 100644 modules/gatk4/fastqtosam/functions.nf delete mode 100644 modules/gatk4/filtermutectcalls/functions.nf delete mode 100644 modules/gatk4/genomicsdbimport/functions.nf delete mode 100644 modules/gatk4/genotypegvcfs/functions.nf delete mode 100644 modules/gatk4/getpileupsummaries/functions.nf delete mode 100644 modules/gatk4/haplotypecaller/functions.nf delete mode 100644 modules/gatk4/indexfeaturefile/functions.nf delete mode 100644 
modules/gatk4/intervallisttools/functions.nf delete mode 100644 modules/gatk4/learnreadorientationmodel/functions.nf delete mode 100644 modules/gatk4/markduplicates/functions.nf delete mode 100644 modules/gatk4/mergebamalignment/functions.nf delete mode 100644 modules/gatk4/mergevcfs/functions.nf delete mode 100644 modules/gatk4/mutect2/functions.nf delete mode 100644 modules/gatk4/revertsam/functions.nf delete mode 100644 modules/gatk4/samtofastq/functions.nf delete mode 100644 modules/gatk4/splitncigarreads/functions.nf delete mode 100644 modules/gatk4/variantfiltration/functions.nf delete mode 100644 modules/genmap/index/functions.nf delete mode 100644 modules/genmap/mappability/functions.nf delete mode 100644 modules/genrich/functions.nf delete mode 100644 modules/gffread/functions.nf delete mode 100644 modules/glnexus/functions.nf delete mode 100644 modules/graphmap2/align/functions.nf delete mode 100644 modules/graphmap2/index/functions.nf delete mode 100644 modules/gstama/collapse/functions.nf delete mode 100644 modules/gstama/merge/functions.nf delete mode 100644 modules/gtdbtk/classifywf/functions.nf delete mode 100644 modules/gubbins/functions.nf delete mode 100644 modules/gunc/downloaddb/functions.nf delete mode 100644 modules/gunc/run/functions.nf delete mode 100644 modules/gunzip/functions.nf delete mode 100644 modules/gunzip/test.txt.gz delete mode 100644 modules/hicap/functions.nf delete mode 100644 modules/hifiasm/functions.nf delete mode 100644 modules/hisat2/align/functions.nf delete mode 100644 modules/hisat2/build/functions.nf delete mode 100644 modules/hisat2/extractsplicesites/functions.nf delete mode 100644 modules/hmmcopy/gccounter/functions.nf delete mode 100644 modules/hmmcopy/readcounter/functions.nf delete mode 100644 modules/hmmer/hmmalign/functions.nf delete mode 100644 modules/homer/annotatepeaks/functions.nf delete mode 100644 modules/homer/findpeaks/functions.nf delete mode 100644 modules/homer/maketagdirectory/functions.nf delete 
mode 100644 modules/homer/makeucscfile/functions.nf delete mode 100644 modules/idr/functions.nf delete mode 100644 modules/imputeme/vcftoprs/functions.nf delete mode 100644 modules/iqtree/functions.nf delete mode 100644 modules/ismapper/functions.nf delete mode 100644 modules/isoseq3/cluster/functions.nf delete mode 100644 modules/isoseq3/refine/functions.nf delete mode 100644 modules/ivar/consensus/functions.nf delete mode 100644 modules/ivar/trim/functions.nf delete mode 100644 modules/ivar/variants/functions.nf delete mode 100644 modules/jupyternotebook/functions.nf delete mode 100644 modules/kallisto/index/functions.nf delete mode 100644 modules/kallistobustools/count/functions.nf delete mode 100644 modules/kallistobustools/ref/functions.nf delete mode 100644 modules/khmer/normalizebymedian/functions.nf delete mode 100644 modules/kleborate/functions.nf delete mode 100644 modules/kraken2/kraken2/functions.nf create mode 100644 modules/krona/kronadb/main.nf rename modules/{kronatools => krona}/kronadb/meta.yml (95%) create mode 100644 modules/krona/ktimporttaxonomy/main.nf rename modules/{kronatools => krona}/ktimporttaxonomy/meta.yml (95%) delete mode 100644 modules/kronatools/kronadb/functions.nf delete mode 100644 modules/kronatools/kronadb/main.nf delete mode 100644 modules/kronatools/ktimporttaxonomy/functions.nf delete mode 100644 modules/kronatools/ktimporttaxonomy/main.nf delete mode 100644 modules/last/dotplot/functions.nf delete mode 100644 modules/last/lastal/functions.nf delete mode 100644 modules/last/lastdb/functions.nf delete mode 100644 modules/last/mafconvert/functions.nf delete mode 100644 modules/last/mafswap/functions.nf delete mode 100644 modules/last/postmask/functions.nf delete mode 100644 modules/last/split/functions.nf delete mode 100644 modules/last/train/functions.nf delete mode 100644 modules/leehom/functions.nf delete mode 100644 modules/lib/functions.nf delete mode 100644 modules/lima/functions.nf delete mode 100644 
modules/lissero/functions.nf delete mode 100644 modules/lofreq/call/functions.nf delete mode 100644 modules/lofreq/callparallel/functions.nf delete mode 100644 modules/lofreq/filter/functions.nf delete mode 100644 modules/lofreq/indelqual/functions.nf delete mode 100644 modules/macs2/callpeak/functions.nf delete mode 100644 modules/malt/build/functions.nf delete mode 100644 modules/malt/run/functions.nf delete mode 100644 modules/maltextract/functions.nf delete mode 100644 modules/manta/germline/functions.nf delete mode 100644 modules/manta/somatic/functions.nf delete mode 100644 modules/manta/tumoronly/functions.nf delete mode 100644 modules/mapdamage2/functions.nf delete mode 100644 modules/mash/sketch/functions.nf delete mode 100644 modules/mashtree/functions.nf delete mode 100644 modules/maxbin2/functions.nf delete mode 100644 modules/medaka/functions.nf delete mode 100644 modules/megahit/functions.nf delete mode 100644 modules/meningotype/functions.nf delete mode 100644 modules/metabat2/jgisummarizebamcontigdepths/functions.nf delete mode 100644 modules/metabat2/metabat2/functions.nf delete mode 100644 modules/metaphlan3/functions.nf delete mode 100644 modules/methyldackel/extract/functions.nf delete mode 100644 modules/methyldackel/mbias/functions.nf delete mode 100644 modules/minia/functions.nf delete mode 100644 modules/miniasm/functions.nf delete mode 100644 modules/minimap2/align/functions.nf delete mode 100644 modules/minimap2/index/functions.nf delete mode 100644 modules/mlst/functions.nf delete mode 100644 modules/mosdepth/functions.nf delete mode 100644 modules/msisensor/msi/functions.nf delete mode 100644 modules/msisensor/scan/functions.nf delete mode 100644 modules/mtnucratio/functions.nf delete mode 100644 modules/multiqc/functions.nf delete mode 100644 modules/mummer/functions.nf delete mode 100644 modules/muscle/functions.nf delete mode 100644 modules/nanolyse/functions.nf delete mode 100644 modules/nanoplot/functions.nf delete mode 100644 
modules/ncbigenomedownload/functions.nf delete mode 100755 modules/nextclade/functions.nf delete mode 100644 modules/ngmaster/functions.nf delete mode 100644 modules/nucmer/functions.nf delete mode 100644 modules/optitype/functions.nf delete mode 100644 modules/pairix/functions.nf delete mode 100644 modules/pairtools/dedup/functions.nf delete mode 100644 modules/pairtools/flip/functions.nf delete mode 100644 modules/pairtools/parse/functions.nf delete mode 100644 modules/pairtools/restrict/functions.nf delete mode 100644 modules/pairtools/select/functions.nf delete mode 100644 modules/pairtools/sort/functions.nf delete mode 100644 modules/pangolin/functions.nf delete mode 100644 modules/paraclu/functions.nf delete mode 100644 modules/pbbam/pbmerge/functions.nf delete mode 100644 modules/pbccs/functions.nf delete mode 100644 modules/peddy/functions.nf delete mode 100644 modules/phantompeakqualtools/functions.nf delete mode 100644 modules/phyloflash/functions.nf delete mode 100644 modules/picard/collecthsmetrics/functions.nf delete mode 100644 modules/picard/collectmultiplemetrics/functions.nf delete mode 100644 modules/picard/collectwgsmetrics/functions.nf delete mode 100644 modules/picard/filtersamreads/functions.nf delete mode 100644 modules/picard/markduplicates/functions.nf delete mode 100644 modules/picard/mergesamfiles/functions.nf delete mode 100644 modules/picard/sortsam/functions.nf delete mode 100644 modules/pirate/functions.nf delete mode 100644 modules/plasmidid/functions.nf delete mode 100644 modules/plink/extract/functions.nf delete mode 100644 modules/plink/vcf/functions.nf delete mode 100644 modules/plink2/vcf/functions.nf delete mode 100644 modules/pmdtools/filter/functions.nf delete mode 100644 modules/porechop/functions.nf delete mode 100644 modules/preseq/lcextrap/functions.nf delete mode 100644 modules/prodigal/functions.nf delete mode 100644 modules/prokka/functions.nf delete mode 100644 modules/pycoqc/functions.nf delete mode 100644 
modules/pydamage/analyze/functions.nf delete mode 100644 modules/pydamage/filter/functions.nf delete mode 100644 modules/qcat/functions.nf delete mode 100644 modules/qualimap/bamqc/functions.nf delete mode 100644 modules/qualimap/rnaseq/functions.nf delete mode 100644 modules/quast/functions.nf delete mode 100644 modules/racon/functions.nf delete mode 100644 modules/rapidnj/functions.nf delete mode 100644 modules/rasusa/functions.nf delete mode 100644 modules/raxmlng/functions.nf delete mode 100644 modules/rmarkdownnotebook/functions.nf delete mode 100644 modules/roary/functions.nf delete mode 100644 modules/rsem/calculateexpression/functions.nf delete mode 100644 modules/rsem/preparereference/functions.nf delete mode 100644 modules/rseqc/bamstat/functions.nf delete mode 100644 modules/rseqc/inferexperiment/functions.nf delete mode 100644 modules/rseqc/innerdistance/functions.nf delete mode 100644 modules/rseqc/junctionannotation/functions.nf delete mode 100644 modules/rseqc/junctionsaturation/functions.nf delete mode 100644 modules/rseqc/readdistribution/functions.nf delete mode 100644 modules/rseqc/readduplication/functions.nf delete mode 100644 modules/salmon/index/functions.nf delete mode 100644 modules/salmon/quant/functions.nf delete mode 100644 modules/samblaster/functions.nf delete mode 100644 modules/samtools/ampliconclip/functions.nf delete mode 100644 modules/samtools/bam2fq/functions.nf delete mode 100644 modules/samtools/depth/functions.nf delete mode 100644 modules/samtools/faidx/functions.nf delete mode 100644 modules/samtools/fastq/functions.nf delete mode 100644 modules/samtools/fixmate/functions.nf delete mode 100644 modules/samtools/flagstat/functions.nf delete mode 100644 modules/samtools/idxstats/functions.nf delete mode 100644 modules/samtools/index/functions.nf delete mode 100644 modules/samtools/merge/functions.nf delete mode 100644 modules/samtools/mpileup/functions.nf delete mode 100644 modules/samtools/sort/functions.nf delete mode 100644 
modules/samtools/stats/functions.nf delete mode 100644 modules/samtools/view/functions.nf delete mode 100644 modules/scoary/functions.nf delete mode 100644 modules/seacr/callpeak/functions.nf delete mode 100644 modules/seqkit/split2/functions.nf delete mode 100644 modules/seqsero2/functions.nf delete mode 100644 modules/seqtk/mergepe/functions.nf delete mode 100644 modules/seqtk/sample/functions.nf delete mode 100644 modules/seqtk/subseq/functions.nf delete mode 100755 modules/sequenzautils/bam2seqz/functions.nf delete mode 100755 modules/sequenzautils/gcwiggle/functions.nf delete mode 100644 modules/seqwish/induce/functions.nf delete mode 100644 modules/shovill/functions.nf delete mode 100644 modules/snpdists/functions.nf delete mode 100644 modules/snpeff/functions.nf delete mode 100644 modules/snpsites/functions.nf delete mode 100644 modules/sortmerna/functions.nf delete mode 100644 modules/spades/functions.nf delete mode 100644 modules/spatyper/functions.nf delete mode 100644 modules/sratools/fasterqdump/functions.nf delete mode 100644 modules/sratools/prefetch/functions.nf delete mode 100644 modules/staphopiasccmec/functions.nf delete mode 100644 modules/star/align/functions.nf delete mode 100644 modules/star/genomegenerate/functions.nf delete mode 100644 modules/strelka/germline/functions.nf delete mode 100644 modules/strelka/somatic/functions.nf delete mode 100644 modules/stringtie/merge/functions.nf delete mode 100644 modules/stringtie/stringtie/functions.nf delete mode 100644 modules/subread/featurecounts/functions.nf delete mode 100644 modules/tabix/bgzip/functions.nf delete mode 100644 modules/tabix/bgziptabix/functions.nf delete mode 100644 modules/tabix/tabix/functions.nf delete mode 100644 modules/tbprofiler/profile/functions.nf delete mode 100644 modules/tiddit/cov/functions.nf delete mode 100644 modules/tiddit/sv/functions.nf delete mode 100644 modules/trimgalore/functions.nf delete mode 100644 modules/ucsc/bed12tobigbed/functions.nf delete mode 
100755 modules/ucsc/bedclip/functions.nf delete mode 100644 modules/ucsc/bedgraphtobigwig/functions.nf delete mode 100755 modules/ucsc/bigwigaverageoverbed/functions.nf delete mode 100644 modules/ucsc/liftover/functions.nf delete mode 100755 modules/ucsc/wigtobigwig/functions.nf delete mode 100644 modules/ultra/pipeline/functions.nf delete mode 100644 modules/umitools/dedup/functions.nf delete mode 100644 modules/umitools/extract/functions.nf delete mode 100644 modules/unicycler/functions.nf delete mode 100644 modules/untar/functions.nf delete mode 100644 modules/unzip/functions.nf delete mode 100644 modules/variantbam/functions.nf delete mode 100644 modules/vcftools/functions.nf delete mode 100644 modules/yara/index/functions.nf delete mode 100644 modules/yara/mapper/functions.nf create mode 100644 tests/modules/abacas/nextflow.config create mode 100644 tests/modules/adapterremoval/nextflow.config create mode 100644 tests/modules/agrvate/nextflow.config create mode 100644 tests/modules/allelecounter/nextflow.config create mode 100644 tests/modules/amps/nextflow.config create mode 100644 tests/modules/arriba/nextflow.config create mode 100644 tests/modules/artic/guppyplex/nextflow.config create mode 100644 tests/modules/artic/minion/nextflow.config create mode 100644 tests/modules/assemblyscan/nextflow.config create mode 100644 tests/modules/ataqv/ataqv/nextflow.config create mode 100644 tests/modules/bakta/nextflow.config create mode 100644 tests/modules/bamaligncleaner/nextflow.config create mode 100644 tests/modules/bamtools/split/nextflow.config create mode 100644 tests/modules/bamutil/trimbam/nextflow.config create mode 100644 tests/modules/bandage/image/nextflow.config create mode 100644 tests/modules/bbmap/align/nextflow.config create mode 100644 tests/modules/bbmap/bbduk/nextflow.config create mode 100644 tests/modules/bbmap/bbsplit/nextflow.config create mode 100644 tests/modules/bbmap/index/nextflow.config create mode 100644 
tests/modules/bcftools/concat/nextflow.config create mode 100644 tests/modules/bcftools/consensus/nextflow.config create mode 100644 tests/modules/bcftools/filter/nextflow.config create mode 100644 tests/modules/bcftools/index/nextflow.config create mode 100644 tests/modules/bcftools/isec/nextflow.config create mode 100644 tests/modules/bcftools/merge/nextflow.config create mode 100644 tests/modules/bcftools/mpileup/nextflow.config create mode 100644 tests/modules/bcftools/norm/nextflow.config create mode 100644 tests/modules/bcftools/query/nextflow.config create mode 100644 tests/modules/bcftools/reheader/nextflow.config create mode 100644 tests/modules/bcftools/stats/nextflow.config create mode 100644 tests/modules/bcftools/view/nextflow.config create mode 100644 tests/modules/bedtools/bamtobed/nextflow.config create mode 100644 tests/modules/bedtools/complement/nextflow.config create mode 100644 tests/modules/bedtools/genomecov/nextflow.config create mode 100644 tests/modules/bedtools/getfasta/nextflow.config create mode 100644 tests/modules/bedtools/intersect/nextflow.config create mode 100644 tests/modules/bedtools/makewindows/nextflow.config create mode 100644 tests/modules/bedtools/maskfasta/nextflow.config create mode 100644 tests/modules/bedtools/merge/nextflow.config create mode 100644 tests/modules/bedtools/slop/nextflow.config create mode 100644 tests/modules/bedtools/sort/nextflow.config create mode 100644 tests/modules/bedtools/subtract/nextflow.config create mode 100644 tests/modules/bismark/align/nextflow.config create mode 100644 tests/modules/bismark/deduplicate/nextflow.config create mode 100644 tests/modules/bismark/genomepreparation/nextflow.config create mode 100644 tests/modules/bismark/methylationextractor/nextflow.config create mode 100644 tests/modules/bismark/report/nextflow.config create mode 100644 tests/modules/bismark/summary/nextflow.config create mode 100644 tests/modules/blast/blastn/nextflow.config create mode 100644 
tests/modules/blast/makeblastdb/nextflow.config create mode 100644 tests/modules/bowtie/align/nextflow.config create mode 100644 tests/modules/bowtie/build_test/nextflow.config create mode 100644 tests/modules/bowtie2/align/nextflow.config create mode 100644 tests/modules/bowtie2/build_test/nextflow.config create mode 100644 tests/modules/bwa/aln/nextflow.config create mode 100644 tests/modules/bwa/index/nextflow.config create mode 100644 tests/modules/bwa/mem/nextflow.config create mode 100644 tests/modules/bwa/sampe/nextflow.config create mode 100644 tests/modules/bwa/samse/nextflow.config create mode 100644 tests/modules/bwamem2/index/nextflow.config create mode 100644 tests/modules/bwamem2/mem/nextflow.config create mode 100644 tests/modules/bwameth/align/nextflow.config create mode 100644 tests/modules/bwameth/index/nextflow.config create mode 100644 tests/modules/cat/cat/nextflow.config create mode 100644 tests/modules/cat/fastq/nextflow.config create mode 100644 tests/modules/cellranger/mkref/nextflow.config create mode 100644 tests/modules/checkm/lineagewf/nextflow.config create mode 100644 tests/modules/chromap/chromap/nextflow.config create mode 100644 tests/modules/chromap/index/nextflow.config create mode 100644 tests/modules/clonalframeml/nextflow.config create mode 100644 tests/modules/cmseq/polymut/nextflow.config create mode 100644 tests/modules/cnvkit/batch/nextflow.config create mode 100644 tests/modules/cooler/cload/nextflow.config create mode 100644 tests/modules/cooler/digest/nextflow.config create mode 100644 tests/modules/cooler/dump/nextflow.config create mode 100644 tests/modules/cooler/merge/nextflow.config create mode 100644 tests/modules/cooler/zoomify/nextflow.config create mode 100644 tests/modules/csvtk/concat/nextflow.config create mode 100644 tests/modules/csvtk/split/nextflow.config create mode 100644 tests/modules/custom/dumpsoftwareversions/nextflow.config create mode 100644 tests/modules/custom/getchromsizes/nextflow.config 
create mode 100644 tests/modules/cutadapt/nextflow.config create mode 100644 tests/modules/damageprofiler/nextflow.config create mode 100644 tests/modules/dastool/dastool/nextflow.config create mode 100644 tests/modules/dastool/scaffolds2bin/nextflow.config create mode 100644 tests/modules/dedup/nextflow.config create mode 100644 tests/modules/deeptools/computematrix/nextflow.config create mode 100644 tests/modules/deeptools/plotfingerprint/nextflow.config create mode 100644 tests/modules/deeptools/plotheatmap/nextflow.config create mode 100644 tests/modules/deeptools/plotprofile/nextflow.config create mode 100644 tests/modules/delly/call/nextflow.config create mode 100644 tests/modules/diamond/blastp/nextflow.config create mode 100644 tests/modules/diamond/blastx/nextflow.config create mode 100644 tests/modules/diamond/makedb/nextflow.config create mode 100644 tests/modules/dragonflye/nextflow.config create mode 100644 tests/modules/dshbio/exportsegments/nextflow.config create mode 100644 tests/modules/dshbio/filterbed/nextflow.config create mode 100644 tests/modules/dshbio/filtergff3/nextflow.config create mode 100644 tests/modules/dshbio/splitbed/nextflow.config create mode 100644 tests/modules/dshbio/splitgff3/nextflow.config create mode 100644 tests/modules/ectyper/nextflow.config create mode 100644 tests/modules/emmtyper/nextflow.config create mode 100644 tests/modules/ensemblvep/nextflow.config create mode 100644 tests/modules/expansionhunter/nextflow.config create mode 100644 tests/modules/fargene/nextflow.config create mode 100644 tests/modules/fastani/nextflow.config create mode 100644 tests/modules/fastp/nextflow.config create mode 100644 tests/modules/fastqc/nextflow.config create mode 100644 tests/modules/fastqscan/nextflow.config create mode 100644 tests/modules/fasttree/nextflow.config create mode 100644 tests/modules/fgbio/callmolecularconsensusreads/nextflow.config create mode 100644 tests/modules/fgbio/fastqtobam/nextflow.config create mode 100644 
tests/modules/fgbio/groupreadsbyumi/nextflow.config create mode 100644 tests/modules/fgbio/sortbam/nextflow.config create mode 100644 tests/modules/filtlong/nextflow.config create mode 100644 tests/modules/flash/nextflow.config create mode 100644 tests/modules/freebayes/nextflow.config create mode 100644 tests/modules/gatk4/applybqsr/nextflow.config create mode 100644 tests/modules/gatk4/baserecalibrator/nextflow.config create mode 100644 tests/modules/gatk4/bedtointervallist/nextflow.config create mode 100644 tests/modules/gatk4/calculatecontamination/nextflow.config create mode 100644 tests/modules/gatk4/createsequencedictionary/nextflow.config create mode 100644 tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config create mode 100644 tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config create mode 100644 tests/modules/gatk4/fastqtosam/nextflow.config create mode 100644 tests/modules/gatk4/filtermutectcalls/nextflow.config create mode 100644 tests/modules/gatk4/genomicsdbimport/nextflow.config create mode 100644 tests/modules/gatk4/genotypegvcfs/nextflow.config create mode 100644 tests/modules/gatk4/getpileupsummaries/nextflow.config create mode 100644 tests/modules/gatk4/haplotypecaller/nextflow.config create mode 100644 tests/modules/gatk4/indexfeaturefile/nextflow.config create mode 100644 tests/modules/gatk4/intervallisttools/nextflow.config create mode 100644 tests/modules/gatk4/learnreadorientationmodel/nextflow.config create mode 100644 tests/modules/gatk4/markduplicates/nextflow.config create mode 100644 tests/modules/gatk4/mergebamalignment/nextflow.config create mode 100644 tests/modules/gatk4/mergevcfs/nextflow.config create mode 100644 tests/modules/gatk4/mutect2/nextflow.config create mode 100644 tests/modules/gatk4/revertsam/nextflow.config create mode 100644 tests/modules/gatk4/samtofastq/nextflow.config create mode 100644 tests/modules/gatk4/splitncigarreads/nextflow.config create mode 100644 
tests/modules/gatk4/variantfiltration/nextflow.config create mode 100644 tests/modules/genmap/index/nextflow.config create mode 100644 tests/modules/genmap/mappability/nextflow.config create mode 100644 tests/modules/genrich/nextflow.config create mode 100644 tests/modules/gffread/nextflow.config create mode 100644 tests/modules/glnexus/nextflow.config create mode 100644 tests/modules/graphmap2/align/nextflow.config create mode 100644 tests/modules/graphmap2/index/nextflow.config create mode 100644 tests/modules/gstama/collapse/nextflow.config create mode 100644 tests/modules/gstama/merge/nextflow.config create mode 100644 tests/modules/gtdbtk/classifywf/nextflow.config create mode 100644 tests/modules/gubbins/nextflow.config create mode 100644 tests/modules/gunc/downloaddb/nextflow.config create mode 100644 tests/modules/gunc/run/nextflow.config create mode 100644 tests/modules/gunzip/nextflow.config create mode 100644 tests/modules/hicap/nextflow.config create mode 100644 tests/modules/hifiasm/nextflow.config create mode 100644 tests/modules/hisat2/align/nextflow.config create mode 100644 tests/modules/hisat2/build_test/nextflow.config create mode 100644 tests/modules/hisat2/extractsplicesites/nextflow.config create mode 100644 tests/modules/hmmcopy/gccounter/nextflow.config create mode 100644 tests/modules/hmmcopy/readcounter/nextflow.config create mode 100644 tests/modules/hmmer/hmmalign/nextflow.config create mode 100644 tests/modules/homer/annotatepeaks/nextflow.config create mode 100644 tests/modules/homer/findpeaks/nextflow.config create mode 100644 tests/modules/homer/maketagdirectory/nextflow.config create mode 100644 tests/modules/homer/makeucscfile/nextflow.config create mode 100644 tests/modules/idr/nextflow.config create mode 100644 tests/modules/imputeme/vcftoprs/nextflow.config create mode 100644 tests/modules/iqtree/nextflow.config create mode 100644 tests/modules/ismapper/nextflow.config create mode 100644 
tests/modules/isoseq3/cluster/nextflow.config create mode 100644 tests/modules/isoseq3/refine/nextflow.config create mode 100644 tests/modules/ivar/consensus/nextflow.config create mode 100644 tests/modules/ivar/trim/nextflow.config create mode 100644 tests/modules/ivar/variants/nextflow.config create mode 100644 tests/modules/jupyternotebook/nextflow.config create mode 100644 tests/modules/kallisto/index/nextflow.config create mode 100644 tests/modules/kallistobustools/count/nextflow.config create mode 100644 tests/modules/kallistobustools/ref/nextflow.config create mode 100644 tests/modules/khmer/normalizebymedian/nextflow.config create mode 100644 tests/modules/kleborate/nextflow.config create mode 100644 tests/modules/kraken2/kraken2/nextflow.config create mode 100644 tests/modules/krona/kronadb/main.nf create mode 100644 tests/modules/krona/kronadb/nextflow.config create mode 100644 tests/modules/krona/kronadb/test.yml create mode 100644 tests/modules/krona/ktimporttaxonomy/main.nf create mode 100644 tests/modules/krona/ktimporttaxonomy/nextflow.config create mode 100644 tests/modules/krona/ktimporttaxonomy/test.yml delete mode 100644 tests/modules/kronatools/kronadb/main.nf delete mode 100644 tests/modules/kronatools/kronadb/test.yml delete mode 100644 tests/modules/kronatools/ktimporttaxonomy/main.nf delete mode 100644 tests/modules/kronatools/ktimporttaxonomy/test.yml create mode 100644 tests/modules/last/dotplot/nextflow.config create mode 100644 tests/modules/last/lastal/nextflow.config create mode 100644 tests/modules/last/lastdb/nextflow.config create mode 100644 tests/modules/last/mafconvert/nextflow.config create mode 100644 tests/modules/last/mafswap/nextflow.config create mode 100644 tests/modules/last/postmask/nextflow.config create mode 100644 tests/modules/last/split/nextflow.config create mode 100644 tests/modules/last/train/nextflow.config create mode 100644 tests/modules/leehom/nextflow.config create mode 100644 
tests/modules/lima/nextflow.config create mode 100644 tests/modules/lissero/nextflow.config create mode 100644 tests/modules/lofreq/call/nextflow.config create mode 100644 tests/modules/lofreq/callparallel/nextflow.config create mode 100644 tests/modules/lofreq/filter/nextflow.config create mode 100644 tests/modules/lofreq/indelqual/nextflow.config create mode 100644 tests/modules/macs2/callpeak/nextflow.config create mode 100644 tests/modules/malt/build_test/nextflow.config create mode 100644 tests/modules/malt/run/nextflow.config create mode 100644 tests/modules/maltextract/nextflow.config create mode 100644 tests/modules/manta/germline/nextflow.config create mode 100644 tests/modules/manta/somatic/nextflow.config create mode 100644 tests/modules/manta/tumoronly/nextflow.config create mode 100644 tests/modules/mapdamage2/nextflow.config create mode 100644 tests/modules/mash/sketch/nextflow.config create mode 100644 tests/modules/mashtree/nextflow.config create mode 100644 tests/modules/maxbin2/nextflow.config create mode 100644 tests/modules/medaka/nextflow.config create mode 100644 tests/modules/megahit/nextflow.config create mode 100644 tests/modules/meningotype/nextflow.config create mode 100644 tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config create mode 100644 tests/modules/metabat2/metabat2/nextflow.config create mode 100644 tests/modules/metaphlan3/nextflow.config create mode 100644 tests/modules/methyldackel/extract/nextflow.config create mode 100644 tests/modules/methyldackel/mbias/nextflow.config create mode 100644 tests/modules/minia/nextflow.config create mode 100644 tests/modules/miniasm/nextflow.config create mode 100644 tests/modules/minimap2/align/nextflow.config create mode 100644 tests/modules/minimap2/index/nextflow.config create mode 100644 tests/modules/mlst/nextflow.config create mode 100644 tests/modules/mosdepth/nextflow.config create mode 100644 tests/modules/msisensor/msi/nextflow.config create mode 100644 
tests/modules/msisensor/scan/nextflow.config create mode 100644 tests/modules/mtnucratio/nextflow.config create mode 100644 tests/modules/multiqc/nextflow.config create mode 100644 tests/modules/mummer/nextflow.config create mode 100644 tests/modules/muscle/nextflow.config create mode 100644 tests/modules/nanolyse/nextflow.config create mode 100644 tests/modules/nanoplot/nextflow.config create mode 100644 tests/modules/ncbigenomedownload/nextflow.config create mode 100644 tests/modules/nextclade/nextflow.config create mode 100644 tests/modules/ngmaster/nextflow.config create mode 100644 tests/modules/nucmer/nextflow.config create mode 100644 tests/modules/optitype/nextflow.config create mode 100644 tests/modules/pairix/nextflow.config create mode 100644 tests/modules/pairtools/dedup/nextflow.config create mode 100644 tests/modules/pairtools/flip/nextflow.config create mode 100644 tests/modules/pairtools/parse/nextflow.config create mode 100644 tests/modules/pairtools/restrict/nextflow.config create mode 100644 tests/modules/pairtools/select/nextflow.config create mode 100644 tests/modules/pairtools/sort/nextflow.config create mode 100644 tests/modules/pangolin/nextflow.config create mode 100644 tests/modules/paraclu/nextflow.config create mode 100644 tests/modules/pbbam/pbmerge/nextflow.config create mode 100644 tests/modules/pbccs/nextflow.config create mode 100644 tests/modules/peddy/nextflow.config create mode 100644 tests/modules/phyloflash/nextflow.config create mode 100644 tests/modules/picard/collecthsmetrics/nextflow.config create mode 100644 tests/modules/picard/collectmultiplemetrics/nextflow.config create mode 100644 tests/modules/picard/collectwgsmetrics/nextflow.config create mode 100644 tests/modules/picard/filtersamreads/nextflow.config create mode 100644 tests/modules/picard/markduplicates/nextflow.config create mode 100644 tests/modules/picard/mergesamfiles/nextflow.config create mode 100644 tests/modules/picard/sortsam/nextflow.config create mode 
100644 tests/modules/pirate/nextflow.config create mode 100644 tests/modules/plasmidid/nextflow.config create mode 100644 tests/modules/plink/extract/nextflow.config create mode 100644 tests/modules/plink/vcf/nextflow.config create mode 100644 tests/modules/plink2/vcf/nextflow.config create mode 100644 tests/modules/pmdtools/filter/nextflow.config create mode 100644 tests/modules/porechop/nextflow.config create mode 100644 tests/modules/preseq/lcextrap/nextflow.config create mode 100644 tests/modules/prodigal/nextflow.config create mode 100644 tests/modules/prokka/nextflow.config create mode 100644 tests/modules/pycoqc/nextflow.config create mode 100644 tests/modules/pydamage/analyze/nextflow.config create mode 100644 tests/modules/pydamage/filter/nextflow.config create mode 100644 tests/modules/qcat/nextflow.config create mode 100644 tests/modules/qualimap/bamqc/nextflow.config create mode 100644 tests/modules/quast/nextflow.config create mode 100644 tests/modules/racon/nextflow.config create mode 100644 tests/modules/rapidnj/nextflow.config create mode 100644 tests/modules/rasusa/nextflow.config create mode 100644 tests/modules/raxmlng/nextflow.config create mode 100644 tests/modules/rmarkdownnotebook/nextflow.config create mode 100644 tests/modules/roary/nextflow.config create mode 100644 tests/modules/rsem/calculateexpression/nextflow.config create mode 100644 tests/modules/rsem/preparereference/nextflow.config create mode 100644 tests/modules/rseqc/bamstat/nextflow.config create mode 100644 tests/modules/rseqc/inferexperiment/nextflow.config create mode 100644 tests/modules/rseqc/innerdistance/nextflow.config create mode 100644 tests/modules/rseqc/junctionannotation/nextflow.config create mode 100644 tests/modules/rseqc/junctionsaturation/nextflow.config create mode 100644 tests/modules/rseqc/readdistribution/nextflow.config create mode 100644 tests/modules/rseqc/readduplication/nextflow.config create mode 100644 tests/modules/salmon/index/nextflow.config 
create mode 100644 tests/modules/salmon/quant/nextflow.config create mode 100644 tests/modules/samblaster/nextflow.config create mode 100644 tests/modules/samtools/ampliconclip/nextflow.config create mode 100644 tests/modules/samtools/bam2fq/nextflow.config create mode 100644 tests/modules/samtools/depth/nextflow.config create mode 100644 tests/modules/samtools/faidx/nextflow.config create mode 100644 tests/modules/samtools/fastq/nextflow.config create mode 100644 tests/modules/samtools/fixmate/nextflow.config create mode 100644 tests/modules/samtools/flagstat/nextflow.config create mode 100644 tests/modules/samtools/idxstats/nextflow.config create mode 100644 tests/modules/samtools/index/nextflow.config create mode 100644 tests/modules/samtools/merge/nextflow.config create mode 100644 tests/modules/samtools/mpileup/nextflow.config create mode 100644 tests/modules/samtools/sort/nextflow.config create mode 100644 tests/modules/samtools/stats/nextflow.config create mode 100644 tests/modules/samtools/view/nextflow.config create mode 100644 tests/modules/scoary/nextflow.config create mode 100644 tests/modules/seacr/callpeak/nextflow.config create mode 100644 tests/modules/seqkit/split2/nextflow.config create mode 100644 tests/modules/seqsero2/nextflow.config create mode 100644 tests/modules/seqtk/mergepe/nextflow.config create mode 100644 tests/modules/seqtk/sample/nextflow.config create mode 100644 tests/modules/seqtk/subseq/nextflow.config create mode 100644 tests/modules/sequenzautils/bam2seqz/nextflow.config create mode 100644 tests/modules/sequenzautils/gcwiggle/nextflow.config create mode 100644 tests/modules/seqwish/induce/nextflow.config create mode 100644 tests/modules/shovill/nextflow.config create mode 100644 tests/modules/snpdists/nextflow.config create mode 100644 tests/modules/snpeff/nextflow.config create mode 100644 tests/modules/snpsites/nextflow.config create mode 100644 tests/modules/spades/nextflow.config create mode 100644 
tests/modules/spatyper/nextflow.config create mode 100644 tests/modules/sratools/fasterqdump/nextflow.config create mode 100644 tests/modules/sratools/prefetch/nextflow.config create mode 100644 tests/modules/staphopiasccmec/nextflow.config create mode 100644 tests/modules/star/align/nextflow.config create mode 100644 tests/modules/star/genomegenerate/nextflow.config create mode 100644 tests/modules/strelka/germline/nextflow.config create mode 100644 tests/modules/strelka/somatic/nextflow.config create mode 100644 tests/modules/stringtie/merge/nextflow.config create mode 100644 tests/modules/stringtie/stringtie/nextflow.config create mode 100644 tests/modules/subread/featurecounts/nextflow.config create mode 100644 tests/modules/tabix/bgzip/nextflow.config create mode 100644 tests/modules/tabix/bgziptabix/nextflow.config create mode 100644 tests/modules/tabix/tabix/nextflow.config create mode 100644 tests/modules/tbprofiler/profile/nextflow.config create mode 100644 tests/modules/tiddit/cov/nextflow.config create mode 100644 tests/modules/tiddit/sv/nextflow.config create mode 100644 tests/modules/trimgalore/nextflow.config create mode 100644 tests/modules/ucsc/bed12tobigbed/nextflow.config create mode 100644 tests/modules/ucsc/bedclip/nextflow.config create mode 100644 tests/modules/ucsc/bedgraphtobigwig/nextflow.config create mode 100644 tests/modules/ucsc/bigwigaverageoverbed/nextflow.config create mode 100644 tests/modules/ucsc/liftover/nextflow.config create mode 100644 tests/modules/ucsc/wigtobigwig/nextflow.config create mode 100644 tests/modules/ultra/pipeline/nextflow.config create mode 100644 tests/modules/unicycler/nextflow.config create mode 100644 tests/modules/untar/nextflow.config create mode 100644 tests/modules/unzip/nextflow.config create mode 100644 tests/modules/variantbam/nextflow.config create mode 100644 tests/modules/vcftools/nextflow.config create mode 100644 tests/modules/yara/index/nextflow.config create mode 100644 
tests/modules/yara/mapper/nextflow.config diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 55b8c296..121dd865 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -7,7 +7,6 @@ on: pull_request: branches: [master] - jobs: changes: name: Check for changes @@ -25,9 +24,6 @@ jobs: lint: runs-on: ubuntu-20.04 - env: - NXF_VER: 21.04.0 - name: ${{ matrix.tags }} needs: changes if: needs.changes.outputs.modules != '[]' diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 7cbb2689..0bd892c8 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -23,13 +23,12 @@ jobs: test: runs-on: ubuntu-20.04 - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} + name: ${{ matrix.tags }} ${{ matrix.profile }} needs: changes if: needs.changes.outputs.modules != '[]' strategy: fail-fast: false matrix: - nxf_version: ["21.04.0"] tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] profile: ["docker", "singularity", "conda"] env: @@ -60,13 +59,12 @@ jobs: - uses: actions/cache@v2 with: path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} + key: ${{ runner.os }} restore-keys: | ${{ runner.os }}-nextflow- - name: Install Nextflow env: - NXF_VER: ${{ matrix.nxf_version }} CAPSULE_LOG: none run: | wget -qO- get.nextflow.io | bash @@ -93,13 +91,13 @@ jobs: # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. 
- run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - name: Upload logs on failure if: failure() uses: actions/upload-artifact@v2 with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} + name: logs-${{ matrix.profile }} path: | /home/runner/pytest_workflow_*/*/.nextflow.log /home/runner/pytest_workflow_*/*/log.out diff --git a/README.md b/README.md index f25b37d9..beee42e7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # ![nf-core/modules](docs/images/nfcore-modules_logo.png) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -78,7 +78,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi nextflow.enable.dsl = 2 - include { FASTQC } from './modules/nf-core/modules/fastqc/main' addParams( options: [:] ) + include { FASTQC } from './modules/nf-core/modules/fastqc/main' ``` 5. 
Remove the module from the pipeline repository if required: diff --git a/modules/abacas/functions.nf b/modules/abacas/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/abacas/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index bc5440b1..7fe71e3a 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ABACAS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::abacas=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0" - } else { - container "quay.io/biocontainers/abacas:1.3.1--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0' : + 'quay.io/biocontainers/abacas:1.3.1--pl526_0' }" input: tuple val(meta), path(scaffold) @@ -27,12 +16,13 @@ process ABACAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ abacas.pl \\ -r $fasta \\ -q $scaffold \\ - $options.args \\ + $args \\ -o ${prefix}.abacas mv nucmer.delta ${prefix}.abacas.nucmer.delta @@ -40,8 +30,8 @@ process ABACAS { mv nucmer.tiling ${prefix}.abacas.nucmer.tiling mv unused_contigs.out ${prefix}.abacas.unused.contigs.out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') + "${task.process}": + abacas: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/adapterremoval/functions.nf b/modules/adapterremoval/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/adapterremoval/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: 
'' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 6d559826..33955ed2 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ADAPTERREMOVAL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::adapterremoval=2.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0" - } else { - container "quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0' : + 'quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0' }" input: tuple val(meta), path(reads) @@ -26,13 +16,14 @@ process ADAPTERREMOVAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ AdapterRemoval \\ --file1 $reads \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -41,8 +32,8 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } else if (!meta.single_end && !meta.collapse) { @@ -50,7 +41,7 @@ process ADAPTERREMOVAL { AdapterRemoval \\ --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -60,8 +51,8 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } else { @@ -70,7 +61,7 @@ process ADAPTERREMOVAL { --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ --collapse \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -79,8 +70,8 @@ process ADAPTERREMOVAL { cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") END_VERSIONS """ } diff --git a/modules/agrvate/functions.nf b/modules/agrvate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/agrvate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index c45bbe06..06392e16 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AGRVATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::agrvate=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -27,15 +16,16 @@ process AGRVATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ agrvate \\ - $options.args \\ + $args \\ -i $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') + "${task.process}": + agrvate: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') END_VERSIONS """ } diff --git a/modules/allelecounter/functions.nf b/modules/allelecounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/allelecounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { 
item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 8d986579..5cbc4cbd 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ALLELECOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cancerit-allelecount=4.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0" - } else { - container "quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0' : + 'quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0' }" input: tuple val(meta), path(input), path(input_index) @@ -28,20 +17,21 @@ process ALLELECOUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference_options = fasta ? 
"-r $fasta": "" """ alleleCounter \\ - $options.args \\ + $args \\ -l $loci \\ -b $input \\ $reference_options \\ -o ${prefix}.alleleCount cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(alleleCounter --version) + "${task.process}": + allelecounter: \$(alleleCounter --version) END_VERSIONS """ } diff --git a/modules/amps/functions.nf b/modules/amps/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/amps/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless 
running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/amps/main.nf b/modules/amps/main.nf index f34423b5..871b57c6 100644 --- a/modules/amps/main.nf +++ b/modules/amps/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AMPS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path maltextract_results @@ -30,6 +19,7 @@ process AMPS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ postprocessing.AMPS.r \\ -r $maltextract_results \\ @@ -37,11 +27,11 @@ process AMPS { -m $filter \\ -t $task.cpus \\ -j \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') + "${task.process}": + amps: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') END_VERSIONS """ } diff --git a/modules/arriba/functions.nf b/modules/arriba/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/arriba/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 6abae233..459ff100 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARRIBA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::arriba=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1" - } else { - container "quay.io/biocontainers/arriba:2.1.0--h3198e80_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1' : + 'quay.io/biocontainers/arriba:2.1.0--h3198e80_1' }" input: tuple val(meta), path(bam) @@ -29,8 +18,9 @@ process ARRIBA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def blacklist = (options.args.contains('-b')) ? '' : '-f blacklist' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def blacklist = (args.contains('-b')) ? '' : '-f blacklist' """ arriba \\ -x $bam \\ @@ -39,11 +29,11 @@ process ARRIBA { -o ${prefix}.fusions.tsv \\ -O ${prefix}.fusions.discarded.tsv \\ $blacklist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') + "${task.process}": + arriba: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') END_VERSIONS """ } diff --git a/modules/artic/guppyplex/functions.nf b/modules/artic/guppyplex/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/guppyplex/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index 87bd99c8..a69e5381 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_GUPPYPLEX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq_dir) @@ -26,18 +15,19 @@ process ARTIC_GUPPYPLEX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ artic \\ guppyplex \\ - $options.args \\ + $args \\ --directory $fastq_dir \\ --output ${prefix}.fastq pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/artic/minion/functions.nf b/modules/artic/minion/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/minion/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 68474f19..86863f95 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_MINION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq) @@ -43,20 +32,21 @@ process ARTIC_MINION { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') - def fast5 = params.fast5_dir ? "--fast5-directory $fast5_dir" : "" - def summary = params.sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" + def fast5 = fast5_dir ? "--fast5-directory $fast5_dir" : "" + def summary = sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" def model = "" - if (options.args.tokenize().contains('--medaka')) { + if (args.tokenize().contains('--medaka')) { fast5 = "" summary = "" - model = file(params.artic_minion_medaka_model).exists() ? "--medaka-model ./$medaka_model" : "--medaka-model $params.artic_minion_medaka_model" + model = file(medaka_model).exists() ? 
"--medaka-model ./$medaka_model" : "--medaka-model $medaka_model" } """ artic \\ minion \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --read-file $fastq \\ --scheme-directory ./primer-schemes \\ @@ -68,8 +58,8 @@ process ARTIC_MINION { $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/assemblyscan/functions.nf b/modules/assemblyscan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/assemblyscan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf index 5b82f922..7b5b752b 100644 --- a/modules/assemblyscan/main.nf +++ b/modules/assemblyscan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ASSEMBLYSCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::assembly-scan=0.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0' : + 'quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0' }" input: tuple val(meta), path(assembly) @@ -26,13 +15,14 @@ process ASSEMBLYSCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ assembly-scan $assembly > ${prefix}.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) + "${task.process}": + assemblyscan: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) END_VERSIONS """ } diff --git a/modules/ataqv/ataqv/functions.nf b/modules/ataqv/ataqv/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ataqv/ataqv/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf index 5ddade28..39602d30 100644 --- a/modules/ataqv/ataqv/main.nf +++ b/modules/ataqv/ataqv/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ATAQV_ATAQV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ataqv=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2" - } else { - container "quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2' : + 'quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2' }" input: tuple val(meta), path(bam), path(bai), path(peak_file) @@ -30,14 +20,15 @@ process ATAQV_ATAQV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def peak = peak_file ? 
"--peak-file $peak_file" : '' def tss = tss_file ? "--tss-file $tss_file" : '' def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' def autosom_ref = autosom_ref_file ? "--autosomal-reference-file $autosom_ref_file" : '' """ ataqv \\ - $options.args \\ + $args \\ $peak \\ $tss \\ $excl_regs \\ @@ -49,8 +40,8 @@ process ATAQV_ATAQV { $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( ataqv --version ) + "${task.process}": + ataqv: \$( ataqv --version ) END_VERSIONS """ } diff --git a/modules/bakta/functions.nf b/modules/bakta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bakta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf index 2939f575..20127e53 100644 --- a/modules/bakta/main.nf +++ b/modules/bakta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAKTA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bakta=1.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0' : + 'quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -37,27 +26,28 @@ process BAKTA { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" """ bakta \\ - $options.args \\ + $args \\ --threads $task.cpus \\ - --prefix ${prefix} \\ + --prefix $prefix \\ --db $db \\ $proteins_opt \\ $prodigal_tf \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) END_VERSIONS """ stub: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ touch ${prefix}.embl touch ${prefix}.faa @@ -70,8 +60,8 @@ process BAKTA { touch ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) END_VERSIONS """ } diff --git a/modules/bamaligncleaner/functions.nf b/modules/bamaligncleaner/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamaligncleaner/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list 
= [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index 720b495a..f1481c39 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMALIGNCLEANER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bamaligncleaner=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0' : + 'quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BAMALIGNCLEANER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bamAlignCleaner \\ - $options.args \\ + $args \\ -o ${prefix}.bam \\ ${bam} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bamAlignCleaner --version | sed 's/.*version //') + "${task.process}": + bamaligncleaner: \$(bamAlignCleaner --version | sed 's/.*version //') END_VERSIONS """ } diff --git a/modules/bamtools/split/functions.nf b/modules/bamtools/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamtools/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 86eaa5d6..676aab6f 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMTOOLS_SPLIT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamtools=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9" - } else { - container "quay.io/biocontainers/bamtools:2.5.1--h9a82719_9" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9' : + 'quay.io/biocontainers/bamtools:2.5.1--h9a82719_9' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process BAMTOOLS_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bamtools \\ split \\ -in $bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) + "${task.process}": + bamtools: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) END_VERSIONS """ } diff --git a/modules/bamutil/trimbam/functions.nf b/modules/bamutil/trimbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamutil/trimbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf index 60949338..a210fe5f 100644 --- a/modules/bamutil/trimbam/main.nf +++ b/modules/bamutil/trimbam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMUTIL_TRIMBAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bamutil=1.0.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1" - } else { - container "quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1' : + 'quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1' }" input: tuple val(meta), path(bam), val(trim_left), val(trim_right) @@ -26,19 +15,20 @@ process BAMUTIL_TRIMBAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bam \\ trimBam \\ $bam \\ ${prefix}.bam \\ - $options.args \\ + $args \\ -L $trim_left \\ -R $trim_right cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) + "${task.process}": + bamutil: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) END_VERSIONS """ } diff --git a/modules/bandage/image/functions.nf b/modules/bandage/image/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bandage/image/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index b7a30a0b..e31566d1 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BANDAGE_IMAGE { tag "${meta.id}" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bandage=0.8.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2" - } else { - container "quay.io/biocontainers/bandage:0.8.1--hc9558a2_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2' : + 'quay.io/biocontainers/bandage:0.8.1--hc9558a2_2' }" input: tuple val(meta), path(gfa) @@ -27,14 +16,15 @@ process BANDAGE_IMAGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - Bandage image $gfa ${prefix}.png $options.args - Bandage image $gfa ${prefix}.svg $options.args + Bandage image $gfa ${prefix}.png $args + Bandage image $gfa ${prefix}.svg $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + bandage: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bbmap/align/functions.nf b/modules/bbmap/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 40810575..ef23fada 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } else { - container "quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" input: tuple val(meta), path(fastq) @@ -28,7 +17,8 @@ process BBMAP_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" input = meta.single_end ? 
"in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" @@ -50,14 +40,14 @@ process BBMAP_ALIGN { $db \\ $input \\ out=${prefix}.bam \\ - $options.args \\ + $args \\ threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g \\ &> ${prefix}.bbmap.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bbmap/bbduk/functions.nf b/modules/bbmap/bbduk/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbduk/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index d7243fdb..98a21eab 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBDUK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bbmap=38.90" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1" - } else { - container "quay.io/biocontainers/bbmap:38.90--he522d1c_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1' : + 'quay.io/biocontainers/bbmap:38.90--he522d1c_1' }" input: tuple val(meta), path(reads) @@ -27,7 +17,8 @@ process BBMAP_BBDUK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? "out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" def contaminants_fa = contaminants ? 
"ref=$contaminants" : '' @@ -38,12 +29,12 @@ process BBMAP_BBDUK { $raw \\ $trimmed \\ threads=$task.cpus \\ - $options.args \\ + $args \\ $contaminants_fa \\ &> ${prefix}.bbduk.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bbmap/bbsplit/functions.nf b/modules/bbmap/bbsplit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbsplit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - 
// Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index b2249b17..53f6b1aa 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBSPLIT { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bbmap=38.93" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.93--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.93--he522d1c_0' }" input: tuple val(meta), path(reads) @@ -32,7 +21,8 @@ process BBMAP_BBSPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { @@ -54,11 +44,11 @@ process BBMAP_BBSPLIT { ${other_refs.join(' ')} \\ path=bbsplit \\ threads=$task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } else { @@ -83,11 +73,11 @@ process BBMAP_BBSPLIT { $fastq_in \\ $fastq_out \\ refstats=${prefix}.stats.txt \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } diff --git a/modules/bbmap/index/functions.nf b/modules/bbmap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// 
-// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index b9e52ec7..4c02f84e 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_INDEX { tag "$fasta" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bbmap=38.92" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.92--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.92--he522d1c_0' }" input: path fasta @@ -26,16 +15,17 @@ process BBMAP_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bbmap.sh \\ ref=${fasta} \\ - $options.args \\ + $args \\ threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bcftools/concat/functions.nf b/modules/bcftools/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index 48280eea..dbd9d9dc 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" - } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0' : + 'quay.io/biocontainers/bcftools:1.11--h7c999a4_0' }" input: tuple val(meta), path(vcfs) @@ -26,17 +15,18 @@ process BCFTOOLS_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools concat \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcfs} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/consensus/functions.nf b/modules/bcftools/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 954b0eb8..9b9384a6 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' - } else { - container 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(tbi), path(fasta) @@ -26,15 +15,16 @@ process BCFTOOLS_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - cat $fasta | bcftools consensus $vcf $options.args > ${prefix}.fa + cat $fasta | bcftools consensus $vcf $args > ${prefix}.fa header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') sed -i 's/\${header}/${meta.id}/g' ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/filter/functions.nf b/modules/bcftools/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - 
return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 5323e0fb..87ad3183 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_FILTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,16 +15,17 @@ process BCFTOOLS_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools filter \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/index/functions.nf b/modules/bcftools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf index d67614d8..8f40c683 100644 --- a/modules/bcftools/index/main.nf +++ b/modules/bcftools/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_INDEX { path "versions.yml" , emit: version script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools \\ index \\ - $options.args \\ + $args \\ --threads $task.cpus \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/isec/functions.nf b/modules/bcftools/isec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/isec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) 
- def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index cc3e425e..c4eab09d 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_ISEC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_ISEC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools isec \\ - $options.args \\ + $args \\ -p $prefix \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/merge/functions.nf b/modules/bcftools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { 
- def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index bb68f184..32ad760c 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools merge -Oz \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/mpileup/functions.nf b/modules/bcftools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index df8455a5..9d91193c 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(bam) @@ -29,21 +18,28 @@ process BCFTOOLS_MPILEUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ echo "${meta.id}" > sample_name.list + bcftools mpileup \\ --fasta-ref $fasta \\ - $options.args \\ + $args \\ $bam \\ - | bcftools call --output-type v $options.args2 \\ + | bcftools call --output-type v $args2 \\ | bcftools reheader --samples sample_name.list \\ - | bcftools view --output-file ${prefix}.vcf.gz --output-type z $options.args3 + | bcftools view --output-file ${prefix}.vcf.gz --output-type z $args3 + tabix -p vcf -f ${prefix}.vcf.gz + bcftools stats ${prefix}.vcf.gz > ${prefix}.bcftools_stats.txt + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/norm/functions.nf b/modules/bcftools/norm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/norm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 
module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 7e506e49..79ab36e0 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_NORM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_NORM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bcftools norm \\ --fasta-ref ${fasta} \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/query/functions.nf b/modules/bcftools/query/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/query/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index dae8bbc4..1919fa76 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_QUERY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,7 +18,8 @@ process BCFTOOLS_QUERY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? 
"--samples-file ${samples}" : "" @@ -40,12 +30,12 @@ process BCFTOOLS_QUERY { ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/reheader/functions.nf b/modules/bcftools/reheader/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/reheader/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 953a8adb..3cbe2d8f 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_REHEADER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -28,7 +17,8 @@ process BCFTOOLS_REHEADER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def update_sequences = fai ? "-f $fai" : "" def new_header = header ? "-h $header" : "" """ @@ -36,14 +26,14 @@ process BCFTOOLS_REHEADER { reheader \\ $update_sequences \\ $new_header \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ${prefix}.vcf.gz \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/stats/functions.nf b/modules/bcftools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { 
- return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index 31bed814..c66f4453 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_STATS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,12 +15,13 @@ process BCFTOOLS_STATS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bcftools stats $options.args $vcf > ${prefix}.bcftools_stats.txt + bcftools stats $args $vcf > ${prefix}.bcftools_stats.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/view/functions.nf b/modules/bcftools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) 
{ - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index ef72f081..b2cbb580 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,25 +18,24 @@ process BCFTOOLS_VIEW { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? 
"--samples-file ${samples}" : "" - - """ bcftools view \\ --output ${prefix}.vcf.gz \\ ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bedtools/bamtobed/functions.nf b/modules/bedtools/bamtobed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/bamtobed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index 71c439d3..aebf7339 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_BAMTOBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BEDTOOLS_BAMTOBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ bamtobed \\ - $options.args \\ + $args \\ -i $bam \\ | bedtools sort > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/complement/functions.nf b/modules/bedtools/complement/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/complement/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) 
{ - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index 77214c64..df44b5bc 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_COMPLEMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_COMPLEMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ complement \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/genomecov/functions.nf b/modules/bedtools/genomecov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/genomecov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index 52f37f23..e2a74ed3 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GENOMECOV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals), val(scale) @@ -28,15 +17,14 @@ process BEDTOOLS_GENOMECOV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args_token = options.args.tokenize() - def args = options.args + def args = task.ext.args ?: '' + def args_list = args.tokenize() args += (scale > 0 && scale != 1) ? " -scale $scale" : "" - - if (!args_token.contains('-bg') && (scale > 0 && scale != 1)) { + if (!args_list.contains('-bg') && (scale > 0 && scale != 1)) { args += " -bg" } + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (intervals.name =~ /\.bam/) { """ bedtools \\ @@ -46,8 +34,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } else { @@ -60,8 +48,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/getfasta/functions.nf b/modules/bedtools/getfasta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/getfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index b27f6183..c4dae429 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GETFASTA { tag "$bed" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: path bed @@ -27,18 +16,19 @@ process BEDTOOLS_GETFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${bed.baseName}${options.suffix}" : "${bed.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${bed.baseName}${task.ext.suffix}" : "${bed.baseName}" """ bedtools \\ getfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/intersect/functions.nf b/modules/bedtools/intersect/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/intersect/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default 
values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index 1ab0a8b2..e01c78ac 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_INTERSECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -27,18 +16,19 @@ process BEDTOOLS_INTERSECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ intersect \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/makewindows/functions.nf b/modules/bedtools/makewindows/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/makewindows/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index c9f863d0..cb7d6561 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MAKEWINDOWS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1' : + 'quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1' }" input: tuple val(meta), path(regions) @@ -27,18 +16,19 @@ process BEDTOOLS_MAKEWINDOWS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def arg_input = use_bed ? "-b $regions" : "-g $regions" """ bedtools \\ makewindows \\ ${arg_input} \\ - $options.args \\ + $args \\ > ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/maskfasta/functions.nf b/modules/bedtools/maskfasta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/maskfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 8ee33d7a..77be060c 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MASKFASTA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,18 @@ process BEDTOOLS_MASKFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ maskfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/merge/functions.nf b/modules/bedtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 92a59f9e..907f1c9b 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process BEDTOOLS_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ merge \\ -i $bed \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/slop/functions.nf b/modules/bedtools/slop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/slop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args 
= args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index 4b412b1f..e5d92850 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SLOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_SLOP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ slop \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ diff --git a/modules/bedtools/sort/functions.nf b/modules/bedtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir 
?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 4a51c4b2..15e69036 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals) @@ -27,17 +16,18 @@ process BEDTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ sort \\ -i $intervals \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/subtract/functions.nf b/modules/bedtools/subtract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/subtract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def 
Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index 54a12bf4..e645109d 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SUBTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -26,18 +15,19 @@ process BEDTOOLS_SUBTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedtools \\ subtract \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bismark/align/functions.nf b/modules/bismark/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index aa4879ba..95e7cdfc 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(reads) @@ -29,18 +18,19 @@ process BISMARK_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fastq = meta.single_end ? reads : "-1 ${reads[0]} -2 ${reads[1]}" """ bismark \\ $fastq \\ - $options.args \\ + $args \\ --genome $index \\ --bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/deduplicate/functions.nf b/modules/bismark/deduplicate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/deduplicate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index c3ff27d6..c95c54d1 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_DEDUPLICATE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process BISMARK_DEDUPLICATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def seqtype = meta.single_end ? 
'-s' : '-p' """ deduplicate_bismark \\ - $options.args \\ + $args \\ $seqtype \\ --bam $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/genomepreparation/functions.nf b/modules/bismark/genomepreparation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/genomepreparation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 0a86173d..e096b2b8 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_GENOMEPREPARATION { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path fasta, stageAs: "BismarkIndex/*" @@ -26,14 +15,15 @@ process BISMARK_GENOMEPREPARATION { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark_genome_preparation \\ - $options.args \\ + $args \\ BismarkIndex cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/methylationextractor/functions.nf b/modules/bismark/methylationextractor/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/methylationextractor/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = 
args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index 5e89e6f8..d99c2b5e 100644 --- a/modules/bismark/methylationextractor/main.nf +++ b/modules/bismark/methylationextractor/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_METHYLATIONEXTRACTOR { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -31,6 +20,7 @@ process BISMARK_METHYLATIONEXTRACTOR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def seqtype = meta.single_end ? 
'-s' : '-p' """ bismark_methylation_extractor \\ @@ -39,12 +29,12 @@ process BISMARK_METHYLATIONEXTRACTOR { --gzip \\ --report \\ $seqtype \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/report/functions.nf b/modules/bismark/report/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/report/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index 70c6ba3b..f828ecd8 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_REPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(align_report), path(dedup_report), path(splitting_report), path(mbias) @@ -26,12 +15,13 @@ process BISMARK_REPORT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - bismark2report $options.args + bismark2report $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/summary/functions.nf b/modules/bismark/summary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/summary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: 
'' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index 3d5f294e..72dba72e 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_SUMMARY { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path(bam) @@ -29,12 +18,13 @@ process BISMARK_SUMMARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark2summary cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/blast/blastn/functions.nf b/modules/blast/blastn/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/blastn/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 0d65f1d0..d1bdcf77 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_BLASTN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process BLAST_BLASTN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` blastn \\ -num_threads $task.cpus \\ -db \$DB \\ -query $fasta \\ - $options.args \\ + $args \\ -out ${prefix}.blastn.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/blast/makeblastdb/functions.nf b/modules/blast/makeblastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/makeblastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index 0538e0db..b4c426a4 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_MAKEBLASTDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: path fasta @@ -26,15 +15,16 @@ process BLAST_MAKEBLASTDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ makeblastdb \\ -in $fasta \\ - $options.args + $args mkdir blast_db mv ${fasta}* blast_db cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie/align/functions.nf b/modules/bowtie/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 764b5be2..12188269 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bowtie=1.3.0 bioconda::samtools=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } else { - container 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' : + 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? 
"$reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ @@ -40,10 +31,10 @@ process BOWTIE_ALIGN { -x \$INDEX \\ -q \\ $unaligned \\ - $options.args \\ + $args \\ $endedness \\ 2> ${prefix}.out \\ - | samtools view $options.args2 -@ $task.cpus -bS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq ]; then gzip ${prefix}.unmapped.fastq @@ -54,8 +45,8 @@ process BOWTIE_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bowtie/build/functions.nf b/modules/bowtie/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return 
a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 1b83541b..dbbc8efa 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie=1.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' - } else { - container 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' : + 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/align/functions.nf b/modules/bowtie2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 6f923951..11c9c20a 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bowtie2=2.4.2 bioconda::samtools=1.11 conda-forge::pigz=2.3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } else { - container "quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' : + 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE2_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? 
"--un-gz ${prefix}.unmapped.fastq.gz" : '' """ @@ -39,13 +30,13 @@ process BOWTIE2_ALIGN { -U $reads \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS @@ -60,9 +51,9 @@ process BOWTIE2_ALIGN { -2 ${reads[1]} \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq.1.gz ]; then mv ${prefix}.unmapped.fastq.1.gz ${prefix}.unmapped_1.fastq.gz @@ -72,8 +63,8 @@ process BOWTIE2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bowtie2/build/functions.nf b/modules/bowtie2/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name 
of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index bc95eea8..c0cbcd79 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' - } else { - container 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' : + 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie2 - bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} + bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bwa/aln/functions.nf b/modules/bwa/aln/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/aln/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index 07135aea..f6cdaefa 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_ALN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), 
meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8" - } else { - container "quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8' : + 'quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8' }" input: tuple val(meta), path(reads) @@ -27,22 +16,23 @@ process BWA_ALN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.sai \\ \$INDEX \\ ${reads} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } else { @@ -50,22 +40,22 @@ process BWA_ALN { INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.1.sai \\ \$INDEX \\ ${reads[0]} bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.2.sai \\ \$INDEX \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/index/functions.nf 
b/modules/bwa/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index db1911cb..89102737 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7" - } else { - container "quay.io/biocontainers/bwa:0.7.17--hed695b0_7" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7' : + 'quay.io/biocontainers/bwa:0.7.17--hed695b0_7' }" input: path fasta @@ -26,17 +15,18 @@ process BWA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwa bwa \\ index \\ - $options.args \\ + $args \\ -p bwa/${fasta.baseName} \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/mem/functions.nf b/modules/bwa/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index b6a548d7..9a04ed63 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads) @@ -27,22 +16,24 @@ process BWA_MEM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/sampe/functions.nf b/modules/bwa/sampe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/sampe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 38127793..2abd9335 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa sampe \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/samse/functions.nf b/modules/bwa/samse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/samse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - 
return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 68fa95c7..56e9127f 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa samse \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwamem2/index/functions.nf b/modules/bwamem2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index 5732017f..e00538c9 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::bwa-mem2=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0" - } else { - container "quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0' : + 'quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0' }" input: path fasta @@ -26,16 +15,17 @@ process BWAMEM2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwamem2 bwa-mem2 \\ index \\ - $options.args \\ + $args \\ $fasta -p bwamem2/${fasta} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ } diff --git a/modules/bwamem2/mem/functions.nf b/modules/bwamem2/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index f88d840f..7c238741 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } else { - container "quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' : + 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' }" input: tuple val(meta), path(reads) @@ -27,23 +16,25 @@ process BWAMEM2_MEM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa-mem2 \\ mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwameth/align/functions.nf b/modules/bwameth/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index e15aba6d..06e9da44 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: tuple val(meta), path(reads) @@ -27,7 +16,9 @@ process BWAMETH_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` @@ -37,16 +28,16 @@ process BWAMETH_ALIGN { touch -c -- * bwameth.py \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ --reference \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/bwameth/index/functions.nf b/modules/bwameth/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index 68fb33d4..f5b8ff59 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: path fasta, stageAs: "bwameth/*" @@ -26,12 +15,13 @@ process BWAMETH_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bwameth.py index $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/cat/cat/functions.nf b/modules/cat/cat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/cat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - 
options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index dac301cb..0c087270 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_CAT { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "conda-forge::pigz=2.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pigz:2.3.4" - } else { - container "quay.io/biocontainers/pigz:2.3.4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pigz:2.3.4' : + 'quay.io/biocontainers/pigz:2.3.4' }" input: path files_in @@ -26,6 +15,8 @@ process CAT_CAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def file_list = files_in.collect { it.toString() } if (file_list.size > 1) { @@ -39,16 +30,16 @@ process CAT_CAT { def in_zip = file_list[0].endsWith('.gz') def out_zip = file_out.endsWith('.gz') def command1 = (in_zip && !out_zip) ? 'zcat' : 'cat' - def command2 = (!in_zip && out_zip) ? 
"| pigz -c -p $task.cpus $options.args2" : '' + def command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $args2" : '' """ $command1 \\ - $options.args \\ + $args \\ ${file_list.join(' ')} \\ $command2 \\ > $file_out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/cat/fastq/functions.nf b/modules/cat/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // 
Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index 538915a7..b6be93b0 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'merged_fastq', meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: tuple val(meta), path(reads) @@ -26,7 +15,8 @@ process CAT_FASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def readList = reads.collect{ it.toString() } if (meta.single_end) { if (readList.size > 1) { @@ -34,8 +24,8 @@ process CAT_FASTQ { cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } @@ -49,8 +39,8 @@ process CAT_FASTQ { cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/cellranger/mkref/functions.nf b/modules/cellranger/mkref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cellranger/mkref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility 
functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cellranger/mkref/main.nf b/modules/cellranger/mkref/main.nf index 22ad66ba..c5d83ac9 100644 --- a/modules/cellranger/mkref/main.nf +++ b/modules/cellranger/mkref/main.nf @@ -1,15 +1,6 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CELLRANGER_MKREF { tag 'mkref' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } if (params.enable_conda) { exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
@@ -19,22 +10,24 @@ process CELLRANGER_MKREF { input: path fasta path gtf - val(reference_name) + val reference_name output: - path "versions.yml" , emit: versions path "${reference_name}", emit: reference + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - cellranger mkref \\ - --genome=${reference_name} \\ - --fasta=${fasta} \\ - --genes=${gtf} + cellranger \\ + mkref \\ + --genome=$reference_name \\ + --fasta=$fasta \\ + --genes=$gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) END_VERSIONS """ } diff --git a/modules/checkm/lineagewf/functions.nf b/modules/checkm/lineagewf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/checkm/lineagewf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return 
a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf index e655e5f5..119ee491 100644 --- a/modules/checkm/lineagewf/main.nf +++ b/modules/checkm/lineagewf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CHECKM_LINEAGEWF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::checkm-genome=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1" - } else { - container "quay.io/biocontainers/checkm-genome:1.1.3--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1' : + 'quay.io/biocontainers/checkm-genome:1.1.3--py_1' }" input: tuple val(meta), path(fasta) @@ -28,7 +17,8 @@ process CHECKM_LINEAGEWF { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ checkm \\ lineage_wf \\ @@ -37,13 +27,13 @@ process CHECKM_LINEAGEWF { --tab_table \\ --pplacer_threads $task.cpus \\ -x $fasta_ext \\ - $options.args \\ + $args \\ . \\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) + "${task.process}": + checkm: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) END_VERSIONS """ } diff --git a/modules/chromap/chromap/functions.nf b/modules/chromap/chromap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/chromap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 9826eed1..f6686cf2 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1' // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: tuple val(meta), path(reads) @@ -37,34 +26,37 @@ process CHROMAP_CHROMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() - def file_extension = options.args.contains("--SAM") ? 'sam' : options.args.contains("--TagAlign")? 'tagAlign' : options.args.contains("--pairs")? 'pairs' : 'bed' + def file_extension = args.contains("--SAM") ? 'sam' : args.contains("--TagAlign")? 'tagAlign' : args.contains("--pairs")? 
'pairs' : 'bed' if (barcodes) { - args << "-b ${barcodes.join(',')}" + args_list << "-b ${barcodes.join(',')}" if (whitelist) { - args << "--barcode-whitelist $whitelist" + args_list << "--barcode-whitelist $whitelist" } } if (chr_order) { - args << "--chr-order $chr_order" + args_list << "--chr-order $chr_order" } if (pairs_chr_order){ - args << "--pairs-natural-chr-order $pairs_chr_order" + args_list << "--pairs-natural-chr-order $pairs_chr_order" } - def final_args = args.join(' ') + def final_args = args_list.join(' ') def compression_cmds = "gzip ${prefix}.${file_extension}" - if (options.args.contains("--SAM")) { + if (args.contains("--SAM")) { compression_cmds = """ - samtools view $options.args2 -@ ${task.cpus} -bh \\ + samtools view $args2 -@ $task.cpus -bh \\ -o ${prefix}.bam ${prefix}.${file_extension} rm ${prefix}.${file_extension} """ } if (meta.single_end) { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -74,13 +66,14 @@ process CHROMAP_CHROMAP { $compression_cmds cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } else { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -91,8 +84,8 @@ process CHROMAP_CHROMAP { $compression_cmds cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } diff --git a/modules/chromap/index/functions.nf b/modules/chromap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index efe85733..cafeca2f 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = 0.1 // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_INDEX { tag '$fasta' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: path fasta @@ -28,18 +17,19 @@ process CHROMAP_INDEX { path "versions.yml", emit: versions script: - def prefix = fasta.baseName + def args = task.ext.args ?: '' + def prefix = fasta.baseName """ chromap \\ -i \\ - $options.args \\ + $args \\ -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/clonalframeml/functions.nf b/modules/clonalframeml/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/clonalframeml/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path 
string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf index f99f944b..60eaad12 100644 --- a/modules/clonalframeml/main.nf +++ b/modules/clonalframeml/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CLONALFRAMEML { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::clonalframeml=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1" - } else { - container "quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1' : + 'quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1' }" input: tuple val(meta), path(newick), path(msa) @@ -31,17 +20,18 @@ process CLONALFRAMEML { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ClonalFrameML \\ $newick \\ <(gzip -cdf $msa) \\ $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) + "${task.process}": + clonalframeml: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) END_VERSIONS """ } diff --git a/modules/cmseq/polymut/functions.nf b/modules/cmseq/polymut/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cmseq/polymut/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf index 4c061e26..18bb8c59 100644 --- a/modules/cmseq/polymut/main.nf +++ b/modules/cmseq/polymut/main.nf @@ -1,23 +1,13 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.0.4' +def VERSION = '1.0.4' // Version information not provided by tool on CLI process CMSEQ_POLYMUT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::cmseq=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0" - } else { - container "quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0' : + 'quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0' }" input: tuple val(meta), path(bam), path(bai), path(gff), path(fasta) @@ -27,20 +17,21 @@ process CMSEQ_POLYMUT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fasta_refid = fasta ? "-c $fasta" : "" def sortindex = bai ? "" : "--sortindex" """ polymut.py \\ - $options.args \\ + $args \\ $sortindex \\ $fasta_refid \\ --gff_file $gff \\ $bam > ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) + "${task.process}": + cmseq: $VERSION END_VERSIONS """ } diff --git a/modules/cnvkit/batch/functions.nf b/modules/cnvkit/batch/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/cnvkit/batch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// 
-def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cnvkit/batch/main.nf b/modules/cnvkit/batch/main.nf index 06ecaa40..811cb409 100644 --- a/modules/cnvkit/batch/main.nf +++ b/modules/cnvkit/batch/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CNVKIT_BATCH { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cnvkit=0.9.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0' : + 'quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0' }" input: tuple val(meta), path(tumor), path(normal) @@ -32,18 +21,18 @@ process CNVKIT_BATCH { path "versions.yml" , emit: versions script: - normal_args = normal ? "--normal $normal" : "" - fasta_args = fasta ? "--fasta $fasta" : "" - reference_args = reference ? "--reference $reference" : "" + def args = task.ext.args ?: '' + def normal_args = normal ? 
"--normal $normal" : "" + def fasta_args = fasta ? "--fasta $fasta" : "" + def reference_args = reference ? "--reference $reference" : "" def target_args = "" - if (options.args.contains("--method wgs") || options.args.contains("-m wgs")) { + if (args.contains("--method wgs") || args.contains("-m wgs")) { target_args = targets ? "--targets $targets" : "" } else { target_args = "--targets $targets" } - """ cnvkit.py \\ batch \\ @@ -52,12 +41,12 @@ process CNVKIT_BATCH { $fasta_args \\ $reference_args \\ $target_args \\ - --processes ${task.cpus} \\ - $options.args + --processes $task.cpus \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cnvkit.py version | sed -e "s/cnvkit v//g") + "${task.process}": + cnvkit: \$(cnvkit.py version | sed -e "s/cnvkit v//g") END_VERSIONS """ } diff --git a/modules/cooler/cload/functions.nf b/modules/cooler/cload/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/cload/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list 
to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf index ec0cad56..ed7a41a1 100644 --- a/modules/cooler/cload/main.nf +++ b/modules/cooler/cload/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_CLOAD { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(pairs), path(index) @@ -28,20 +17,21 @@ process COOLER_CLOAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def nproc = options.args.contains('pairix') || options.args.contains('tabix')? 
"--nproc ${task.cpus}" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def nproc = args.contains('pairix') || args.contains('tabix')? "--nproc $task.cpus" : '' """ cooler cload \\ - $options.args \\ + $args \\ $nproc \\ ${chromsizes}:${cool_bin} \\ $pairs \\ ${prefix}.${cool_bin}.cool cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/digest/functions.nf b/modules/cooler/digest/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/digest/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index 5728b649..9658ec31 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DIGEST { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: path fasta @@ -28,17 +17,18 @@ process COOLER_DIGEST { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ cooler digest \\ - $options.args \\ + $args \\ -o "${fasta.baseName}_${enzyme.replaceAll(/[^0-9a-zA-Z]+/, '_')}.bed" \\ $chromsizes \\ $fasta \\ $enzyme cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/functions.nf b/modules/cooler/dump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/dump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 1ca11c7d..0836640e 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DUMP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -27,17 +16,18 @@ process COOLER_DUMP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def suffix = resolution ? 
"::$resolution" : "" """ cooler dump \\ - $options.args \\ + $args \\ -o ${prefix}.bedpe \\ $cool$suffix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index 659b06a1..a9d1afd5 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -21,6 +21,9 @@ input: type: file description: Path to COOL file pattern: "*.{cool,mcool}" + - resolution: + type: value + description: Resolution output: - meta: diff --git a/modules/cooler/merge/functions.nf b/modules/cooler/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty 
entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf index b15439a4..0fed76c9 100644 --- a/modules/cooler/merge/main.nf +++ b/modules/cooler/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_MERGE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda 
(params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -26,16 +15,17 @@ process COOLER_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ cooler merge \\ - $options.args \\ + $args \\ ${prefix}.cool \\ ${cool} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/zoomify/functions.nf b/modules/cooler/zoomify/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/zoomify/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf index 3f1ed4e7..e61ca99d 100644 --- a/modules/cooler/zoomify/main.nf +++ b/modules/cooler/zoomify/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_ZOOMIFY { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) @@ -26,17 +15,18 @@ process COOLER_ZOOMIFY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ cooler zoomify \\ - $options.args \\ + $args \\ -n $task.cpus \\ -o ${prefix}.mcool \\ $cool cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/csvtk/concat/functions.nf b/modules/csvtk/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/csvtk/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index 194b1e14..745a9ac4 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CSVTK_CONCAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::csvtk=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" input: tuple val(meta), path(csv) @@ -28,14 +17,15 @@ process CSVTK_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' """ csvtk \\ concat \\ - $options.args \\ + $args \\ --num-cpus $task.cpus \\ --delimiter "${delimiter}" \\ --out-delimiter "${out_delimiter}" \\ @@ -43,7 +33,7 @@ process CSVTK_CONCAT { $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": csvtk: \$(echo \$( csvtk version | sed -e "s/csvtk v//g" )) END_VERSIONS """ diff --git a/modules/csvtk/split/functions.nf b/modules/csvtk/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/csvtk/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // 
Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf index 727e046a..89b44154 100644 --- a/modules/csvtk/split/main.nf +++ b/modules/csvtk/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CSVTK_SPLIT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::csvtk=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" input: tuple val(meta), path(csv) @@ -28,7 +17,8 @@ process CSVTK_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' @@ -36,15 +26,15 @@ process CSVTK_SPLIT { sed -i.bak '/^##/d' $csv csvtk \\ split \\ - $options.args \\ + $args \\ --num-cpus $task.cpus \\ $delimiter \\ $out_delimiter \\ $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) + "${task.process}": + csvtk: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) END_VERSIONS """ } diff --git a/modules/custom/dumpsoftwareversions/functions.nf b/modules/custom/dumpsoftwareversions/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/custom/dumpsoftwareversions/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index faf2073f..934bb467 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container conda (params.enable_conda ? 
"bioconda::multiqc=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path versions @@ -27,80 +16,6 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { path "versions.yml" , emit: versions script: - """ - #!/usr/bin/env python - - import yaml - import platform - from textwrap import dedent - - def _make_versions_html(versions): - html = [ - dedent( - '''\\ - - - - - - - - - - ''' - ) - ] - for process, tmp_versions in sorted(versions.items()): - html.append("") - for i, (tool, version) in enumerate(sorted(tmp_versions.items())): - html.append( - dedent( - f'''\\ - - - - - - ''' - ) - ) - html.append("") - html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") - return "\\n".join(html) - - module_versions = {} - module_versions["${getProcessName(task.process)}"] = { - 'python': platform.python_version(), - 'yaml': yaml.__version__ - } - - with open("$versions") as f: - workflow_versions = yaml.load(f, Loader=yaml.BaseLoader) | module_versions - - workflow_versions["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version" - } - - versions_mqc = { - 'id': 'software_versions', - 'section_name': '${workflow.manifest.name} Software Versions', - 'section_href': 'https://github.com/${workflow.manifest.name}', - 'plot_type': 'html', - 'description': 'are collected at run time from the software output.', - 'data': _make_versions_html(workflow_versions) - } - - with open("software_versions.yml", 'w') as f: - yaml.dump(workflow_versions, f, default_flow_style=False) - with open("software_versions_mqc.yml", 'w') as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - - with open('versions.yml', 'w') as f: - yaml.dump(module_versions, f, default_flow_style=False) - """ + def args = task.ext.args ?: '' + template 'dumpsoftwareversions.py' } diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml index c8310e35..5b5b8a60 100644 --- a/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -31,3 +31,4 @@ output: authors: - "@drpatelh" + - "@grst" diff --git a/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py new file mode 100644 index 00000000..d1390392 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +import yaml +import platform +from textwrap import dedent + + +def _make_versions_html(versions): + html = [ + dedent( + """\\ + + + + + + + + + + """ + ) + ] + for process, tmp_versions in 
sorted(versions.items()): + html.append("") + for i, (tool, version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f"""\\ + + + + + + """ + ) + ) + html.append("") + html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") + return "\\n".join(html) + + +versions_this_module = {} +versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, +} + +with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + +# aggregate versions by the module name (derived from fully-qualified process name) +versions_by_module = {} +for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + assert versions_by_module[module] == process_versions, ( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + +versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", +} + +versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), +} + +with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) +with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + +with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) diff --git a/modules/custom/getchromsizes/functions.nf b/modules/custom/getchromsizes/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/custom/getchromsizes/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def 
getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf index fb46986b..270b3f48 100644 --- a/modules/custom/getchromsizes/main.nf +++ b/modules/custom/getchromsizes/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUSTOM_GETCHROMSIZES { tag "$fasta" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: path fasta @@ -27,13 +16,14 @@ process CUSTOM_GETCHROMSIZES { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ samtools faidx $fasta cut -f 1,2 ${fasta}.fai > ${fasta}.sizes cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + custom: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/cutadapt/functions.nf b/modules/cutadapt/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cutadapt/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index 32faf2cf..f98113e8 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUTADAPT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::cutadapt=3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' - } else { - container 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' : + 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' }" input: tuple val(meta), path(reads) @@ -27,18 +16,19 @@ process CUTADAPT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def trimmed = meta.single_end ? "-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ cutadapt \\ --cores $task.cpus \\ - $options.args \\ + $args \\ $trimmed \\ $reads \\ > ${prefix}.cutadapt.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cutadapt --version) + "${task.process}": + cutadapt: \$(cutadapt --version) END_VERSIONS """ } diff --git a/modules/damageprofiler/functions.nf b/modules/damageprofiler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/damageprofiler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index 3800a305..da37909e 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DAMAGEPROFILER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::damageprofiler=1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2" - } else { - container "quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2' : + 'quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -29,23 +18,21 @@ process DAMAGEPROFILER { path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? "-r $fasta" : "" def species_list = specieslist ? "-sf $specieslist" : "" - """ damageprofiler \\ - -i $bam \\ - -o $prefix/ \\ - $options.args \\ - $reference \\ - $species_list + -i $bam \\ + -o $prefix/ \\ + $args \\ + $reference \\ + $species_list cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(damageprofiler -v | sed 's/^DamageProfiler v//') + "${task.process}": + damageprofiler: \$(damageprofiler -v | sed 's/^DamageProfiler v//') END_VERSIONS """ - } diff --git a/modules/dastool/dastool/functions.nf b/modules/dastool/dastool/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dastool/dastool/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index dff32294..b67ee993 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DASTOOL_DASTOOL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::das_tool=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" - } else { - container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" input: tuple val(meta), path(contigs), path(bins) @@ -37,7 +27,8 @@ process DASTOOL_DASTOOL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def bin_list = bins instanceof List ? bins.join(",") : "$bins" def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" def db_dir = db_directory ? 
"--db_directory $db_directory" : "" @@ -56,7 +47,7 @@ process DASTOOL_DASTOOL { $decompress_contigs DAS_Tool \\ - $options.args \\ + $args \\ $proteins_pred \\ $db_dir \\ $engine \\ @@ -66,8 +57,8 @@ process DASTOOL_DASTOOL { -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) END_VERSIONS """ } diff --git a/modules/dastool/scaffolds2bin/functions.nf b/modules/dastool/scaffolds2bin/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dastool/scaffolds2bin/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf index b51a6e6e..78a06b6e 100644 --- a/modules/dastool/scaffolds2bin/main.nf +++ b/modules/dastool/scaffolds2bin/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DASTOOL_SCAFFOLDS2BIN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::das_tool=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0" - } else { - container "quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,21 +16,22 @@ process DASTOOL_SCAFFOLDS2BIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def file_extension = extension ? extension : "fasta" """ gunzip -f *.${file_extension}.gz Fasta_to_Scaffolds2Bin.sh \\ - $options.args \\ + $args \\ -i . 
\\ -e $file_extension \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) END_VERSIONS """ } diff --git a/modules/dedup/functions.nf b/modules/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless 
running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf index 62d720f6..60fc376e 100644 --- a/modules/dedup/main.nf +++ b/modules/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEDUP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dedup=0.12.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1" - } else { - container "quay.io/biocontainers/dedup:0.12.8--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1' : + 'quay.io/biocontainers/dedup:0.12.8--hdfd78af_1' }" input: tuple val(meta), path(bam) @@ -29,18 +18,19 @@ process DEDUP { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ dedup \\ -Xmx${task.memory.toGiga()}g \\ -i $bam \\ -o . \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') + "${task.process}": + dedup: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') END_VERSIONS """ diff --git a/modules/deeptools/computematrix/functions.nf b/modules/deeptools/computematrix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/computematrix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// 
-def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index 9fffdb8e..e39310f4 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_COMPUTEMATRIX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bigwig) @@ -28,10 +17,11 @@ process DEEPTOOLS_COMPUTEMATRIX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ computeMatrix \\ - $options.args \\ + $args \\ --regionsFileName $bed \\ --scoreFileName $bigwig \\ --outFileName ${prefix}.computeMatrix.mat.gz \\ @@ -39,8 +29,8 @@ process DEEPTOOLS_COMPUTEMATRIX { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(computeMatrix --version | sed -e "s/computeMatrix //g") + "${task.process}": + deeptools: \$(computeMatrix --version | sed -e "s/computeMatrix //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotfingerprint/functions.nf b/modules/deeptools/plotfingerprint/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotfingerprint/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index b2d167f9..aeb635ce 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTFINGERPRINT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bams), path(bais) @@ -28,11 +17,12 @@ process DEEPTOOLS_PLOTFINGERPRINT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? "--extendReads ${params.fragment_size}" : '' """ plotFingerprint \\ - $options.args \\ + $args \\ $extend \\ --bamfiles ${bams.join(' ')} \\ --plotFile ${prefix}.plotFingerprint.pdf \\ @@ -41,8 +31,8 @@ process DEEPTOOLS_PLOTFINGERPRINT { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") + "${task.process}": + deeptools: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotheatmap/functions.nf b/modules/deeptools/plotheatmap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotheatmap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from 
process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 19c243df..f981744e 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTHEATMAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTHEATMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plotHeatmap \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotHeatmap.pdf \\ --outFileNameMatrix ${prefix}.plotHeatmap.mat.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") + "${task.process}": + deeptools: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotprofile/functions.nf b/modules/deeptools/plotprofile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotprofile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index 3a196bd5..b32e04d3 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTPROFILE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTPROFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ plotProfile \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotProfile.pdf \\ --outFileNameData ${prefix}.plotProfile.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotProfile --version | sed -e "s/plotProfile //g") + "${task.process}": + deeptools: \$(plotProfile --version | sed -e "s/plotProfile //g") END_VERSIONS """ } diff --git a/modules/delly/call/functions.nf b/modules/delly/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/delly/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// 
-def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index 59979dc9..d4aa1adb 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DELLY_CALL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::delly=0.8.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1" - } else { - container "quay.io/biocontainers/delly:0.8.7--he03298f_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1' : + 'quay.io/biocontainers/delly:0.8.7--he03298f_1' }" input: tuple val(meta), path(bam), path(bai) @@ -29,18 +18,19 @@ process DELLY_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ delly \\ call \\ - $options.args \\ + $args \\ -o ${prefix}.bcf \\ -g $fasta \\ $bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') + "${task.process}": + delly: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') END_VERSIONS """ } diff --git a/modules/diamond/blastp/functions.nf b/modules/diamond/blastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 6afc66c4..015be864 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? 
"bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTP { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/blastx/functions.nf b/modules/diamond/blastx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index db2953da..f4018aa9 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTX { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/makedb/functions.nf b/modules/diamond/makedb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/makedb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index e4533f8f..cccfcce9 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_MAKEDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Dimaond is limited to v2.0.9 because 
there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? 'bioconda::diamond=2.0.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: path fasta @@ -28,17 +17,18 @@ process DIAMOND_MAKEDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ diamond \\ makedb \\ --threads $task.cpus \\ --in $fasta \\ -d $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/dragonflye/functions.nf b/modules/dragonflye/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dragonflye/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index f9dc9004..8ca98832 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DRAGONFLYE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dragonflye=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0" - } else { - container "quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0' : + 'quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,18 +19,19 @@ process DRAGONFLYE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ dragonflye \\ --reads ${reads} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) + "${task.process}": + dragonflye: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) END_VERSIONS """ } diff --git a/modules/dshbio/exportsegments/functions.nf b/modules/dshbio/exportsegments/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/exportsegments/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { 
item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index ec471000..7cc5da22 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_EXPORTSEGMENTS { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, 
options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gfa) @@ -26,17 +15,18 @@ process DSHBIO_EXPORTSEGMENTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ export-segments \\ - $options.args \\ + $args \\ -i $gfa \\ -o ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filterbed/functions.nf b/modules/dshbio/filterbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filterbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] 
-} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 9ad8ce8b..065d8bec 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process DSHBIO_FILTERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ filter-bed \\ - $options.args \\ + $args \\ -i $bed \\ -o ${prefix}.bed.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filtergff3/functions.nf b/modules/dshbio/filtergff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filtergff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bf729dbf..c738c95a 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,17 +15,18 @@ process DSHBIO_FILTERGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ filter-gff3 \\ - $options.args \\ + $args \\ -i $gff3 \\ -o ${prefix}.gff3.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitbed/functions.nf b/modules/dshbio/splitbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 20e679f4..60b8b7a3 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,18 +15,19 @@ process DSHBIO_SPLITBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ split-bed \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.bed.gz' \\ -i $bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitgff3/functions.nf b/modules/dshbio/splitgff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitgff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index e0312a19..7ad2fd08 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,18 +15,19 @@ process DSHBIO_SPLITGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ dsh-bio \\ split-gff3 \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.gff3.gz' \\ -i $gff3 cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/ectyper/functions.nf b/modules/ectyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ectyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - 
def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf index b5d8202d..5f458eb9 100644 --- a/modules/ectyper/main.nf +++ b/modules/ectyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ECTYPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ectyper=1.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1' : + 'quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -28,7 +17,8 @@ process ECTYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed = fasta.getName().endsWith(".gz") ? 
true : false def fasta_name = fasta.getName().replace(".gz", "") """ @@ -37,15 +27,16 @@ process ECTYPER { fi ectyper \\ - $options.args \\ + $args \\ --cores $task.cpus \\ --output ./ \\ --input $fasta_name + mv output.tsv ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') + "${task.process}": + ectyper: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/emmtyper/functions.nf b/modules/emmtyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/emmtyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf index 74624c1f..9cf98694 100644 --- a/modules/emmtyper/main.nf +++ b/modules/emmtyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process EMMTYPER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::emmtyper=0.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0" - } else { - container "quay.io/biocontainers/emmtyper:0.2.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0' : + 'quay.io/biocontainers/emmtyper:0.2.0--py_0' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process EMMTYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ emmtyper \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) + "${task.process}": + emmtyper: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) END_VERSIONS """ } diff --git a/modules/ensemblvep/functions.nf b/modules/ensemblvep/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ensemblvep/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index ad9c38a6..76cd9235 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,26 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.vep_tag = "" - process ENSEMBLVEP { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0" - } else { - container "quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0" - } + if (task.ext.use_cache) { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" } else { - container "nfcore/vep:${params.vep_tag}" + container "nfcore/vep:${task.ext.vep_tag}" } input: @@ -36,15 +23,16 @@ process ENSEMBLVEP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - dir_cache = params.use_cache ? "\${PWD}/${cache}" : "/.vep" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def dir_cache = task.ext.use_cache ? "\${PWD}/${cache}" : "/.vep" """ mkdir $prefix vep \\ -i $vcf \\ -o ${prefix}.ann.vcf \\ - $options.args \\ + $args \\ --assembly $genome \\ --species $species \\ --cache \\ @@ -57,8 +45,8 @@ process ENSEMBLVEP { rm -rf $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') + "${task.process}": + ensemblvep: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') END_VERSIONS """ } diff --git a/modules/expansionhunter/functions.nf b/modules/expansionhunter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/expansionhunter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 845de15d..2ef00d17 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process EXPANSIONHUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::expansionhunter=4.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0" - } else { - container "quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0' : + 'quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,11 +17,12 @@ process EXPANSIONHUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? "male" : "female" """ ExpansionHunter \\ - $options.args \\ + $args \\ --reads $bam \\ --output-prefix $prefix \\ --reference $fasta \\ @@ -40,8 +30,8 @@ process EXPANSIONHUNTER { --sex $gender cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') + "${task.process}": + expansionhunter: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') END_VERSIONS """ } diff --git a/modules/fargene/functions.nf b/modules/fargene/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fargene/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index f2afe4be..5bf1c604 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1' +def VERSION = '0.1' // Version information not provided by tool on CLI process FARGENE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::fargene=0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4" - } else { - container "quay.io/biocontainers/fargene:0.1--py27h21c881e_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4' : + 'quay.io/biocontainers/fargene:0.1--py27h21c881e_4' }" input: // input may be fasta (for genomes or longer contigs) or paired-end fastq (for metagenome), the latter in addition with --meta flag @@ -43,21 +32,22 @@ process FARGENE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gzip \\ -cdf $input \\ > unziped.fa | fargene \\ - $options.args \\ + $args \\ -p $task.cpus \\ -i unziped.fa \\ --hmm-model $hmm_model \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + fargene: $VERSION END_VERSIONS """ } diff --git a/modules/fastani/functions.nf b/modules/fastani/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastani/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// 
-def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 5c6366f9..7e3721bd 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTANI { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastani=1.32" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0" - } else { - container "quay.io/biocontainers/fastani:1.32--he1c1bb9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0' : + 'quay.io/biocontainers/fastani:1.32--he1c1bb9_0' }" input: tuple val(meta), path(query) @@ -27,7 +16,8 @@ process FASTANI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.batch_input) { """ @@ -37,8 +27,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } else { @@ -49,8 +39,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } diff --git a/modules/fastp/functions.nf b/modules/fastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index e99540d5..05eb1e98 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::fastp=0.20.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' - } else { - container 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' : + 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' }" input: tuple val(meta), path(reads) @@ -33,8 +22,9 @@ process FASTP { tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged script: + def args = task.ext.args ?: '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' """ @@ -46,11 +36,11 @@ process FASTP { --json ${prefix}.fastp.json \\ --html ${prefix}.fastp.html \\ $fail_fastq \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } else { @@ -70,12 +60,12 @@ process FASTP { $merge_fastq \\ --thread $task.cpus \\ --detect_adapter_for_pe \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } diff --git a/modules/fastqc/functions.nf b/modules/fastqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 
--- a/modules/fastqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 9f6cfc55..673a00b8 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" - } else { - container "quay.io/biocontainers/fastqc:0.11.9--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : + 'quay.io/biocontainers/fastqc:0.11.9--0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process FASTQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz + fastqc $args --threads $task.cpus ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz + fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } diff --git a/modules/fastqscan/functions.nf b/modules/fastqscan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastqscan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf index 0106892f..768728f2 100644 --- a/modules/fastqscan/main.nf +++ b/modules/fastqscan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQSCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastq-scan=0.4.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0" - } else { - container "quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0' : + 'quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0' }" input: tuple val(meta), path(reads) @@ -26,15 +15,16 @@ process FASTQSCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ zcat $reads | \\ fastq-scan \\ - $options.args > ${prefix}.json + $args > ${prefix}.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) + "${task.process}": + fastqscan: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) END_VERSIONS """ } diff --git a/modules/fasttree/functions.nf b/modules/fasttree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fasttree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fasttree/main.nf b/modules/fasttree/main.nf index 5f81d1f2..5e57aae9 100644 --- a/modules/fasttree/main.nf +++ b/modules/fasttree/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTTREE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::fasttree=2.1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4" - } else { - container "quay.io/biocontainers/fasttree:2.1.10--h516909a_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4' : + 'quay.io/biocontainers/fasttree:2.1.10--h516909a_4' }" input: path alignment @@ -25,16 +14,17 @@ process FASTTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ fasttree \\ - $options.args \\ + $args \\ -log fasttree_phylogeny.tre.log \\ -nt $alignment \\ > fasttree_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') + "${task.process}": + fasttree: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') END_VERSIONS """ } diff --git a/modules/fgbio/callmolecularconsensusreads/functions.nf b/modules/fgbio/callmolecularconsensusreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/callmolecularconsensusreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args 
?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index 23056b90..f514b69a 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_CALLMOLECULARCONSENSUSREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,17 +15,18 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ fgbio \\ CallMolecularConsensusReads \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/fastqtobam/functions.nf b/modules/fgbio/fastqtobam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/fastqtobam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf index 68a85508..40713d03 100644 --- a/modules/fgbio/fastqtobam/main.nf +++ b/modules/fgbio/fastqtobam/main.nf @@ -1,51 +1,39 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_FASTQTOBAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(reads) - val(read_structure) + val read_structure output: tuple val(meta), path("*_umi_converted.bam"), emit: umibam path "versions.yml" , emit: version script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - mkdir tmpFolder + mkdir tmp fgbio \\ - --tmp-dir=${PWD}/tmpFolder \\ + --tmp-dir=${PWD}/tmp \\ FastqToBam \\ -i $reads \\ -o "${prefix}_umi_converted.bam" \\ --read-structures $read_structure \\ --sample $meta.id \\ --library $meta.id \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/groupreadsbyumi/functions.nf b/modules/fgbio/groupreadsbyumi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/groupreadsbyumi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf index 8e16f0a5..b35186a5 100644 --- a/modules/fgbio/groupreadsbyumi/main.nf +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_GROUPREADSBYUMI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(taggedbam) @@ -28,7 +17,8 @@ process FGBIO_GROUPREADSBYUMI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir tmp @@ -37,14 +27,14 @@ process FGBIO_GROUPREADSBYUMI { --tmp-dir=${PWD}/tmp \\ GroupReadsByUmi \\ -s $strategy \\ - ${options.args} \\ + $args \\ -i $taggedbam \\ -o ${prefix}_umi-grouped.bam \\ -f ${prefix}_umi_histogram.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/sortbam/functions.nf b/modules/fgbio/sortbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/sortbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 34e0b377..c2822548 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_SORTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,16 +15,17 @@ process FGBIO_SORTBAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ fgbio \\ SortBam \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/filtlong/functions.nf b/modules/filtlong/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/filtlong/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = 
[:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf index 6e82f112..10e147a6 100644 --- a/modules/filtlong/main.nf +++ b/modules/filtlong/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FILTLONG { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::filtlong=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0" - } else { - container "quay.io/biocontainers/filtlong:0.2.1--h9a82719_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0' : + 'quay.io/biocontainers/filtlong:0.2.1--h9a82719_0' }" input: tuple val(meta), path(shortreads), path(longreads) @@ -26,18 +15,19 @@ process FILTLONG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def short_reads = meta.single_end ? 
"-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" """ filtlong \\ $short_reads \\ - $options.args \\ + $args \\ $longreads \\ | gzip -n > ${prefix}_lr_filtlong.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( filtlong --version | sed -e "s/Filtlong v//g" ) + "${task.process}": + filtlong: \$( filtlong --version | sed -e "s/Filtlong v//g" ) END_VERSIONS """ } diff --git a/modules/flash/functions.nf b/modules/flash/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/flash/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 912b2961..23bd1892 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FLASH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::flash=1.2.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5" - } else { - container "quay.io/biocontainers/flash:1.2.11--hed695b0_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5' : + 'quay.io/biocontainers/flash:1.2.11--hed695b0_5' }" input: tuple val(meta), path(reads) @@ -25,18 +14,19 @@ process FLASH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ flash \\ - $options.args \\ + $args \\ -o ${prefix} \\ -z \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') + "${task.process}": + flash: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/freebayes/functions.nf b/modules/freebayes/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/freebayes/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf index 0b23dc40..b9a63d02 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/main.nf @@ -1,38 +1,28 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FREEBAYES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::freebayes=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" - } else { - container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3' : + 'quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3' }" input: tuple val(meta), path(input_1), path(input_1_index), path(input_2), path(input_2_index) path fasta - path fai + path fasta_fai path targets path samples path populations path cnv output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - path "versions.yml" , emit: versions + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? "--target ${targets}" : "" def samples_file = samples ? "--samples ${samples}" : "" @@ -42,20 +32,20 @@ process FREEBAYES { if (task.cpus > 1) { """ freebayes-parallel \\ - <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ + <(fasta_generate_regions.py $fasta_fai 10000) $task.cpus \\ -f $fasta \\ $targets_file \\ $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) END_VERSIONS """ @@ -67,14 +57,14 @@ process FREEBAYES { $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) 
END_VERSIONS """ } diff --git a/modules/freebayes/meta.yml b/modules/freebayes/meta.yml index 75d44826..abba1daa 100644 --- a/modules/freebayes/meta.yml +++ b/modules/freebayes/meta.yml @@ -36,10 +36,10 @@ input: type: file description: reference fasta file pattern: ".{fa,fa.gz,fasta,fasta.gz}" - - fai: + - fasta_fai: type: file description: reference fasta file index - pattern: "*.fai" + pattern: "*.{fa,fasta}.fai" - targets: type: file description: Optional - Limit analysis to targets listed in this BED-format FILE. @@ -55,8 +55,7 @@ input: - cnv: type: file description: | - A copy number map BED file, which has - either a sample-level ploidy: + A copy number map BED file, which has either a sample-level ploidy: sample_name copy_number or a region-specific format: seq_name start end sample_name copy_number diff --git a/modules/gatk4/applybqsr/functions.nf b/modules/gatk4/applybqsr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/applybqsr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// 
-def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index c89a4a4d..f93dd574 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_APPLYBQSR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index), path(bqsr_table) @@ -30,7 +19,8 @@ process GATK4_APPLYBQSR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -45,11 +35,11 @@ process GATK4_APPLYBQSR { $interval \\ --tmp-dir . \\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/baserecalibrator/functions.nf b/modules/gatk4/baserecalibrator/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/baserecalibrator/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index ce6f5906..b422a798 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BASERECALIBRATOR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) @@ -32,7 +21,8 @@ process GATK4_BASERECALIBRATOR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') @@ -48,12 +38,12 @@ process GATK4_BASERECALIBRATOR { $sitesCommand \ $intervalsCommand \ --tmp-dir . 
\ - $options.args \ + $args \ -O ${prefix}.table cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/bedtointervallist/functions.nf b/modules/gatk4/bedtointervallist/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/bedtointervallist/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - 
// Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 7c06ccef..77819a0f 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BEDTOINTERVALLIST { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,18 @@ process GATK4_BEDTOINTERVALLIST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk BedToIntervalList \\ -I $bed \\ -SD $sequence_dict \\ -O ${prefix}.interval_list \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/calculatecontamination/functions.nf b/modules/gatk4/calculatecontamination/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/calculatecontamination/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map 
of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 28dd7ccf..93a2ee57 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CALCULATECONTAMINATION { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(pileup), path(matched) @@ -28,7 +17,8 @@ process GATK4_CALCULATECONTAMINATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' """ @@ -37,11 +27,11 @@ process GATK4_CALCULATECONTAMINATION { $matched_command \\ -O ${prefix}.contamination.table \\ $segment_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsequencedictionary/functions.nf b/modules/gatk4/createsequencedictionary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsequencedictionary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index db28e244..8d001856 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESEQUENCEDICTIONARY { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: path fasta @@ -26,6 +15,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[GATK] Available memory not known - defaulting to 6GB. 
Specify process memory requirements to change this.' @@ -37,11 +27,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { CreateSequenceDictionary \\ --REFERENCE $fasta \\ --URI $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsomaticpanelofnormals/functions.nf b/modules/gatk4/createsomaticpanelofnormals/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsomaticpanelofnormals/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 49136256..9bc8d1d0 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESOMATICPANELOFNORMALS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(genomicsdb) @@ -30,18 +19,19 @@ process GATK4_CREATESOMATICPANELOFNORMALS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk \\ CreateSomaticPanelOfNormals \\ -R $fasta \\ -V gendb://$genomicsdb \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/estimatelibrarycomplexity/functions.nf b/modules/gatk4/estimatelibrarycomplexity/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/estimatelibrarycomplexity/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index bfaeedbc..b0b35e42 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_ESTIMATELIBRARYCOMPLEXITY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(cram) @@ -29,7 +18,8 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") def avail_mem = 3 @@ -44,11 +34,11 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { -O ${prefix}.metrics \ --REFERENCE_SEQUENCE ${fasta} \ --VALIDATION_STRINGENCY SILENT \ - --TMP_DIR . $options.args + --TMP_DIR . $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/fastqtosam/functions.nf b/modules/gatk4/fastqtosam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/fastqtosam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index 5879618d..fc075735 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FASTQTOSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -26,18 +15,19 @@ process GATK4_FASTQTOSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" """ gatk FastqToSam \\ $read_files \\ -O ${prefix}.bam \\ -SM $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/filtermutectcalls/functions.nf b/modules/gatk4/filtermutectcalls/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/filtermutectcalls/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// 
-def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 6e10ff0f..7111db37 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FILTERMUTECTCALLS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) @@ -31,7 +20,8 @@ process GATK4_FILTERMUTECTCALLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def orientationbias_options = '' if (orientationbias) { @@ -55,11 +45,11 @@ process GATK4_FILTERMUTECTCALLS { $segmentation_options \\ $contamination_options \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/genomicsdbimport/functions.nf b/modules/gatk4/genomicsdbimport/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/genomicsdbimport/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index c5582563..110dbf4f 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GENOMICSDBIMPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(intervalfile), val(intervalval), path(wspace) @@ -31,7 +20,8 @@ process GATK4_GENOMICSDBIMPORT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" // settings for running default create gendb mode inputs_command = input_map ? 
"--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" @@ -57,11 +47,11 @@ process GATK4_GENOMICSDBIMPORT { $inputs_command \\ $dir_command \\ $intervals_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/genotypegvcfs/functions.nf b/modules/gatk4/genotypegvcfs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/genotypegvcfs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index 6fbbe663..ddb4a922 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GENOTYPEGVCFS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" input: tuple val(meta), path(gvcf), path(gvcf_index) @@ -32,14 +21,15 @@ process GATK4_GENOTYPEGVCFS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? 
"$gvcf" : "gendb://$gvcf" """ gatk \\ GenotypeGVCFs \\ - $options.args \\ + $args \\ $interval_options \\ $dbsnp_options \\ -R $fasta \\ @@ -47,8 +37,8 @@ process GATK4_GENOTYPEGVCFS { -O ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/getpileupsummaries/functions.nf b/modules/gatk4/getpileupsummaries/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/getpileupsummaries/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index f08d4d91..0894e17b 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GETPILEUPSUMMARIES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -29,7 +18,8 @@ process GATK4_GETPILEUPSUMMARIES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def sitesCommand = '' sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " @@ -40,11 +30,11 @@ process GATK4_GETPILEUPSUMMARIES { -V $variants \\ $sitesCommand \\ -O ${prefix}.pileups.table \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/haplotypecaller/functions.nf b/modules/gatk4/haplotypecaller/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/haplotypecaller/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 1e540d17..418a2785 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_HAPLOTYPECALLER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) @@ -33,7 +22,8 @@ process GATK4_HAPLOTYPECALLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def interval_option = interval ? "-L ${interval}" : "" def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" def avail_mem = 3 @@ -51,12 +41,12 @@ process GATK4_HAPLOTYPECALLER { ${dbsnp_option} \\ ${interval_option} \\ -O ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --tmp-dir . cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/indexfeaturefile/functions.nf b/modules/gatk4/indexfeaturefile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/indexfeaturefile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf index 8f40a3e3..d33e030c 100644 --- a/modules/gatk4/indexfeaturefile/main.nf +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_INDEXFEATUREFILE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } 
conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" input: tuple val(meta), path(feature_file) @@ -26,15 +15,16 @@ process GATK4_INDEXFEATUREFILE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ gatk \\ IndexFeatureFile \\ - $options.args \\ + $args \\ -I $feature_file cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/intervallisttools/functions.nf b/modules/gatk4/intervallisttools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/intervallisttools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 5da651b9..8e5b70e1 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_INTERVALLISTTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(interval_list) @@ -26,7 +15,8 @@ process GATK4_INTERVALLISTTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir ${prefix}_split @@ -35,7 +25,7 @@ process GATK4_INTERVALLISTTOOLS { IntervalListTools \\ -I ${interval_list} \\ -O ${prefix}_split \\ - $options.args + $args python3 < versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/learnreadorientationmodel/functions.nf b/modules/gatk4/learnreadorientationmodel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/learnreadorientationmodel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index b8aee764..5e9700e3 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_LEARNREADORIENTATIONMODEL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(f1r2) @@ -26,7 +15,8 @@ process GATK4_LEARNREADORIENTATIONMODEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } """ @@ -34,11 +24,11 @@ process GATK4_LEARNREADORIENTATIONMODEL { LearnReadOrientationModel \\ ${inputs_list.join(' ')} \\ -O ${prefix}.tar.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/markduplicates/functions.nf b/modules/gatk4/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index e44f4bfc..9f0b46da 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MARKDUPLICATES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -28,7 +17,8 @@ process GATK4_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") def avail_mem = 3 if (!task.memory) { @@ -43,11 +33,11 @@ process GATK4_MARKDUPLICATES { --TMP_DIR . \\ --CREATE_INDEX true \\ --OUTPUT ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergebamalignment/functions.nf b/modules/gatk4/mergebamalignment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergebamalignment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 9c5fe26c..01effb0f 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEBAMALIGNMENT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(aligned) @@ -29,18 +18,19 @@ process GATK4_MERGEBAMALIGNMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk MergeBamAlignment \\ ALIGNED=$aligned \\ UNMAPPED=$unmapped \\ R=$fasta \\ O=${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergevcfs/functions.nf b/modules/gatk4/mergevcfs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergevcfs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index 28073fcb..cbfc2e9d 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEVCFS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcfs) @@ -28,7 +17,8 @@ process GATK4_MERGEVCFS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" // Make list of VCFs to merge def input = "" @@ -41,11 +31,11 @@ process GATK4_MERGEVCFS { $input \\ O=${prefix}.vcf.gz \\ $ref \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/functions.nf b/modules/gatk4/mutect2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mutect2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index e0e2661b..662b3f0c 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MUTECT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta) , path(input) , path(input_index) , val(which_norm) @@ -40,7 +29,8 @@ process GATK4_MUTECT2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def panels_command = '' def normals_command = '' @@ -70,11 +60,11 @@ process GATK4_MUTECT2 { ${normals_command} \\ ${panels_command} \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/revertsam/functions.nf b/modules/gatk4/revertsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/revertsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 7b5ee696..bca31a29 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_REVERTSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process GATK4_REVERTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk RevertSam \\ I=$bam \\ O=${prefix}.reverted.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/samtofastq/functions.nf b/modules/gatk4/samtofastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/samtofastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 843c61ce..aa9a6b2d 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SAMTOFASTQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process GATK4_SAMTOFASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" """ gatk SamToFastq \\ I=$bam \\ $output \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/functions.nf b/modules/gatk4/splitncigarreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/splitncigarreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 26fb799d..32d36df9 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SPLITNCIGARREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -29,17 +18,18 @@ process GATK4_SPLITNCIGARREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gatk SplitNCigarReads \\ -R $fasta \\ -I $bam \\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/variantfiltration/functions.nf b/modules/gatk4/variantfiltration/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/variantfiltration/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index e0f0727a..d5cc1eb3 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_VARIANTFILTRATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(vcf_tbi) @@ -30,7 +19,8 @@ process GATK4_VARIANTFILTRATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
@@ -42,11 +32,11 @@ process GATK4_VARIANTFILTRATION { -R $fasta \\ -V $vcf \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/genmap/index/functions.nf b/modules/genmap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genmap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genmap/index/main.nf b/modules/genmap/index/main.nf index c79596f0..943f1a31 100644 --- a/modules/genmap/index/main.nf +++ b/modules/genmap/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENMAP_INDEX { tag '$fasta' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::genmap=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1" - } else { - container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' : + 'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }" input: path fasta @@ -26,6 +15,7 @@ process GENMAP_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ genmap \\ index \\ @@ -33,8 +23,8 @@ process GENMAP_INDEX { -I genmap cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + "${task.process}": + genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') END_VERSIONS """ } diff --git a/modules/genmap/mappability/functions.nf b/modules/genmap/mappability/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genmap/mappability/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - 
options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genmap/mappability/main.nf b/modules/genmap/mappability/main.nf index 4d858cbb..94083f14 100644 --- a/modules/genmap/mappability/main.nf +++ b/modules/genmap/mappability/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENMAP_MAPPABILITY { tag '$fasta' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::genmap=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1" - } else { - container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' : + 'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }" input: path index @@ -28,16 +17,17 @@ process GENMAP_MAPPABILITY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ genmap \\ map \\ - $options.args \\ + $args \\ -I $index \\ -O mappability cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + "${task.process}": + genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') END_VERSIONS """ } diff --git a/modules/genrich/functions.nf b/modules/genrich/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genrich/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index f34f9cd2..dfbebd3a 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENRICH { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::genrich=0.6.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1" - } else { - container "quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1' : + 'quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1' }" input: tuple val(meta), path(treatment_bam) @@ -36,7 +25,8 @@ process GENRICH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def control = control_bam ? "-c $control_bam" : '' def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" @@ -44,7 +34,7 @@ process GENRICH { def bed = save_bed ? "-b ${prefix}.intervals.bed" : "" def duplicates = "" if (save_duplicates) { - if (options.args.contains('-r')) { + if (args.contains('-r')) { duplicates = "-R ${prefix}.duplicates.txt" } else { log.info '[Genrich] Duplicates can only be saved if they are filtered, defaulting to -r option (Remove PCR duplicates).' 
@@ -54,7 +44,7 @@ process GENRICH { """ Genrich \\ -t $treatment_bam \\ - $options.args \\ + $args \\ $control \\ $blacklist \\ -o ${prefix}.narrowPeak \\ @@ -65,8 +55,8 @@ process GENRICH { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') + "${task.process}": + genrich: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gffread/functions.nf b/modules/gffread/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gffread/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 4133ee08..d31f76f8 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GFFREAD { tag "$gff" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::gffread=0.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0" - } else { - container "quay.io/biocontainers/gffread:0.12.1--h8b12597_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0' : + 'quay.io/biocontainers/gffread:0.12.1--h8b12597_0' }" input: path gff @@ -26,15 +15,16 @@ process GFFREAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${gff.baseName}${options.suffix}" : "${gff.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${gff.baseName}${task.ext.suffix}" : "${gff.baseName}" """ gffread \\ $gff \\ - $options.args \\ + $args \\ -o ${prefix}.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gffread --version 2>&1) + "${task.process}": + gffread: \$(gffread --version 2>&1) END_VERSIONS """ } diff --git a/modules/glnexus/functions.nf b/modules/glnexus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/glnexus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: 
'' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 1384334f..e36729b2 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GLNEXUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::glnexus=1.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0" - } else { - container "quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0' : + 'quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0' }" input: tuple val(meta), path(gvcfs) @@ -26,7 +15,8 @@ process GLNEXUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" // Make list of GVCFs to merge def input = gvcfs.collect { it.toString() } @@ -40,13 +30,13 @@ process GLNEXUS { glnexus_cli \\ --threads $task.cpus \\ --mem-gbytes $avail_mem \\ - $options.args \\ + $args \\ ${input.join(' ')} \\ > ${prefix}.bcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') + "${task.process}": + glnexus: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/align/functions.nf b/modules/graphmap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index 831b0b3b..e0f2d4cd 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_ALIGN { tag "$meta.id" label 'process_medium' tag "$meta.id" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process GRAPHMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ graphmap2 \\ align \\ @@ -38,11 +28,11 @@ process GRAPHMAP2_ALIGN { -i $index \\ -d $reads \\ -o ${prefix}.sam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/index/functions.nf b/modules/graphmap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index a8b03074..fffc7bcb 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? "bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: path fasta @@ -25,17 +14,18 @@ process GRAPHMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ graphmap2 \\ align \\ -t $task.cpus \\ -I \\ - $options.args \\ + $args \\ -r $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gstama/collapse/functions.nf b/modules/gstama/collapse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/collapse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index 8fc7877f..d8a64113 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_COLLAPSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gs-tama=1.0.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0" - - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -37,17 +25,18 @@ process GSTAMA_COLLAPSE { tuple val(meta), path("*_variants.txt") , emit: variants, optional: true script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ tama_collapse.py \\ -s $bam \\ -f $fasta \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) + "${task.process}": + gstama: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) END_VERSIONS """ } diff --git a/modules/gstama/merge/functions.nf b/modules/gstama/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf index 37d685f6..4a8e829c 100644 --- a/modules/gstama/merge/main.nf +++ b/modules/gstama/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gs-tama=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -30,17 +19,18 @@ process GSTAMA_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ tama_merge.py \\ -f $filelist \\ -d merge_dup \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_merge.py -version | head -n1 ) + "${task.process}": + gstama: \$( tama_merge.py -version | head -n1 ) END_VERSIONS """ } diff --git a/modules/gtdbtk/classifywf/functions.nf b/modules/gtdbtk/classifywf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gtdbtk/classifywf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: 
args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gtdbtk/classifywf/main.nf b/modules/gtdbtk/classifywf/main.nf index fdcef76a..4a4b3a01 100644 --- a/modules/gtdbtk/classifywf/main.nf +++ b/modules/gtdbtk/classifywf/main.nf @@ -1,22 +1,12 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.5.0' // When using stubs for the GTDB database, the version info isn't printed. +def VERSION = '1.5.0' // Version information not provided by tool on CLI process GTDBTK_CLASSIFYWF { tag "${meta.assembler}-${meta.id}" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gtdbtk=1.5.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0' : + 'quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0' }" input: tuple val(meta), path("bins/*") @@ -35,6 +25,7 @@ process GTDBTK_CLASSIFYWF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def pplacer_scratch = params.gtdbtk_pplacer_scratch ? "--scratch_dir pplacer_tmp" : "" """ export GTDBTK_DATA_PATH="\${PWD}/database" @@ -43,7 +34,7 @@ process GTDBTK_CLASSIFYWF { fi gtdbtk classify_wf \\ - $options.args \\ + $args \\ --genome_dir bins \\ --prefix "gtdbtk.${meta.assembler}-${meta.id}" \\ --out_dir "\${PWD}" \\ @@ -58,8 +49,8 @@ process GTDBTK_CLASSIFYWF { mv gtdbtk.warnings.log "gtdbtk.${meta.assembler}-${meta.id}.warnings.log" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") + "${task.process}": + gtdbtk: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") END_VERSIONS """ @@ -76,8 +67,8 @@ process GTDBTK_CLASSIFYWF { touch gtdbtk.${meta.assembler}-${meta.id}.failed_genomes.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + gtdbtk: $VERSION END_VERSIONS """ } diff --git a/modules/gubbins/functions.nf b/modules/gubbins/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gubbins/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 
module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index da194906..b4c6dc23 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUBBINS { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::gubbins=3.0.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0" - } else { - container "quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0' : + 'quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0' }" input: path alignment @@ -33,14 +22,15 @@ process GUBBINS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ run_gubbins.py \\ --threads $task.cpus \\ - $options.args \\ + $args \\ $alignment cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(run_gubbins.py --version 2>&1) + "${task.process}": + gubbins: \$(run_gubbins.py --version 2>&1) END_VERSIONS """ } diff --git a/modules/gunc/downloaddb/functions.nf b/modules/gunc/downloaddb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/downloaddb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/downloaddb/main.nf b/modules/gunc/downloaddb/main.nf index af421608..430b862b 100644 --- a/modules/gunc/downloaddb/main.nf +++ b/modules/gunc/downloaddb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_DOWNLOADDB { tag '$db_name' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: val db_name @@ -26,12 +15,13 @@ process GUNC_DOWNLOADDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - gunc download_db . -db $db_name $options.args + gunc download_db . -db $db_name $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunc/run/functions.nf b/modules/gunc/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf index f873a7df..6ac681ad 100644 --- a/modules/gunc/run/main.nf +++ b/modules/gunc/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_RUN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -28,18 +17,19 @@ process GUNC_RUN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ gunc \\ run \\ --input_fasta $fasta \\ --db_file $db \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunzip/functions.nf b/modules/gunzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless 
running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index 564fa99d..77a4e546 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: tuple val(meta), path(archive) @@ -26,16 +15,17 @@ process GUNZIP { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' gunzip = archive.toString() - '.gz' """ gunzip \\ -f \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') + "${task.process}": + gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/gunzip/test.txt.gz b/modules/gunzip/test.txt.gz deleted file mode 100644 index 381417cf643f1b5c547b57b251d71e6d5ce11e16..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 47 zcmb2|=HU3lo{`AFT#{N`qE}K;!r-m#=Xv^+o}cIW6JE^0nUR_|V;IhR&VMY%z`y_i DevlAG diff --git a/modules/hicap/functions.nf b/modules/hicap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hicap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { 
- return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index fbc157b1..ed1d7797 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HICAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hicap=1.0.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0" - } else { - container "quay.io/biocontainers/hicap:1.0.3--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0' : + 'quay.io/biocontainers/hicap:1.0.3--py_0' }" input: tuple val(meta), path(fasta) @@ -30,7 +19,8 @@ process HICAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def database_args = database_dir ? "--database_dir ${database_dir}" : "" def model_args = model_fp ? 
"--model_fp ${model_fp}" : "" def is_compressed = fasta.getName().endsWith(".gz") ? true : false @@ -43,13 +33,13 @@ process HICAP { --query_fp $fasta_name \\ $database_args \\ $model_args \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ./ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) + "${task.process}": + hicap: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) END_VERSIONS """ } diff --git a/modules/hifiasm/functions.nf b/modules/hifiasm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hifiasm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 9dfc9618..7fc857f1 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HIFIASM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::hifiasm=0.15.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0" - } else { - container "quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0' : + 'quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0' }" input: tuple val(meta), path(reads) @@ -37,11 +26,12 @@ process HIFIASM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (use_parental_kmers) { """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ -1 $paternal_kmer_dump \\ @@ -49,21 +39,21 @@ process HIFIASM { $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } else { // Phasing with Hi-C data is not supported yet """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } diff --git a/modules/hisat2/align/functions.nf b/modules/hisat2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 9b73216b..0c5f4134 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hisat2=2.2.0 bioconda::samtools=1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } else { - container "quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' : + 'quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' }" input: tuple val(meta), path(reads) @@ -26,14 +15,14 @@ process HISAT2_ALIGN { path splicesites output: - tuple val(meta), path("*.bam"), emit: bam - tuple val(meta), path("*.log"), emit: summary - path "versions.yml" , emit: versions - + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.log") , emit: summary tuple val(meta), path("*fastq.gz"), optional:true, emit: fastq + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -55,12 +44,12 @@ process HISAT2_ALIGN { --threads $task.cpus \\ $seq_center \\ $unaligned \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 256 - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ @@ -80,7 +69,7 @@ process HISAT2_ALIGN { $unaligned \\ --no-mixed \\ --no-discordant \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 8 -F 256 - > ${prefix}.bam if [ -f ${prefix}.unmapped.fastq.1.gz ]; then @@ -91,8 +80,8 @@ process HISAT2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/hisat2/build/functions.nf 
b/modules/hisat2/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index 015f6f59..4e8cd02b 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -1,25 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_BUILD { tag "$fasta" label 'process_high' label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path fasta @@ -31,6 +20,7 @@ process HISAT2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 0 if (!task.memory) { log.info "[HISAT2 index build] Available memory not known - defaulting to 0. Specify process memory requirements to change this." @@ -52,7 +42,6 @@ process HISAT2_BUILD { log.info "[HISAT2 index build] Less than ${hisat2_build_memory} GB available, so NOT using splice sites and exons to build HISAT2 index." log.info "[HISAT2 index build] Use --hisat2_build_memory [small number] to skip this check." } - """ mkdir hisat2 $extract_exons @@ -60,13 +49,13 @@ process HISAT2_BUILD { -p $task.cpus \\ $ss \\ $exon \\ - $options.args \\ + $args \\ $fasta \\ hisat2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hisat2/extractsplicesites/functions.nf b/modules/hisat2/extractsplicesites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/extractsplicesites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index 1c8b7830..302c35f1 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_EXTRACTSPLICESITES { tag "$gtf" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path gtf @@ -28,11 +17,12 @@ process HISAT2_EXTRACTSPLICESITES { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/gccounter/functions.nf b/modules/hmmcopy/gccounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/gccounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/gccounter/main.nf b/modules/hmmcopy/gccounter/main.nf index 6e7bc11f..36666095 100644 --- a/modules/hmmcopy/gccounter/main.nf +++ b/modules/hmmcopy/gccounter/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_GCCOUNTER { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: path fasta @@ -27,14 +16,15 @@ process HMMCOPY_GCCOUNTER { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ gcCounter \\ - $options.args \\ + $args \\ ${fasta} > ${fasta.baseName}.gc.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/readcounter/functions.nf b/modules/hmmcopy/readcounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/readcounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files 
- options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 9e3e72a7..6cd776a1 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_READCOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(bam), path(bai) output: - tuple val(meta), path("*.wig"), emit: wig - path "versions.yml" , emit: versions + tuple val(meta), path("*.wig"), emit: wig + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ readCounter \\ - $options.args \\ + $args \\ ${bam} > ${prefix}.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmer/hmmalign/functions.nf b/modules/hmmer/hmmalign/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmer/hmmalign/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to 
return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index b4292feb..a25871e8 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HMMER_HMMALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmer=3.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1" - } else { - container "quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1' : + 'quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process HMMER_HMMALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? 
"gunzip -c $fasta" : "cat $fasta" """ $fastacmd | \\ hmmalign \\ - $options.args \\ + $args \\ $hmm \\ - | gzip -c > ${meta.id}.sthlm.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') + "${task.process}": + hmmer: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') END_VERSIONS """ } diff --git a/modules/homer/annotatepeaks/functions.nf b/modules/homer/annotatepeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/annotatepeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 1714644b..321dbc7c 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_ANNOTATEPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::homer=4.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(peak) @@ -30,19 +19,20 @@ process HOMER_ANNOTATEPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ annotatePeaks.pl \\ $peak \\ $fasta \\ - $options.args \\ + $args \\ -gtf $gtf \\ -cpu $task.cpus \\ > ${prefix}.annotatePeaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/findpeaks/functions.nf b/modules/homer/findpeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/findpeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: 
'' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index 2e0b6db9..a39fe753 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_FINDPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,17 +17,18 @@ process HOMER_FINDPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ findPeaks \\ $tagDir \\ - $options.args \\ + $args \\ -o ${prefix}.peaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/maketagdirectory/functions.nf b/modules/homer/maketagdirectory/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/maketagdirectory/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 4f531e82..44490d50 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKETAGDIRECTORY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(bed) @@ -26,20 +15,21 @@ process HOMER_MAKETAGDIRECTORY { output: tuple val(meta), path("tag_dir"), emit: tagdir - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ makeTagDirectory \\ tag_dir \\ - $options.args \\ + $args \\ $bed \\ -genome $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/makeucscfile/functions.nf b/modules/homer/makeucscfile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/makeucscfile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index c56da24b..8a0e3f37 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKEUCSCFILE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,16 +17,17 @@ process HOMER_MAKEUCSCFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ makeUCSCfile \\ $tagDir \\ - -o auto - $options.args + -o auto \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/idr/functions.nf b/modules/idr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/idr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: 
args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/idr/main.nf b/modules/idr/main.nf index 006826ac..44b07be4 100644 --- a/modules/idr/main.nf +++ b/modules/idr/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IDR { tag "$prefix" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::idr=2.0.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5" - } else { - container "quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5' : + 'quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5' }" input: path peaks @@ -30,6 +19,7 @@ process IDR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' if (peaks.toList().size < 2) { log.error "[ERROR] idr needs at least two replicates only one provided." } @@ -46,11 +36,11 @@ process IDR { --output-file $idr_vals \\ --log-output-file $log_file \\ --plot \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') + "${task.process}": + idr: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/imputeme/vcftoprs/functions.nf b/modules/imputeme/vcftoprs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/imputeme/vcftoprs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf index a3ce7e3c..0c8c1952 100644 --- a/modules/imputeme/vcftoprs/main.nf +++ b/modules/imputeme/vcftoprs/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName } from './functions' - - -params.options = [:] -options = initOptions(params.options) - process IMPUTEME_VCFTOPRS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "YOUR-TOOL-HERE" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img" - } else { - container "biocontainers/imputeme:vv1.0.7_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img' : + 'biocontainers/imputeme:vv1.0.7_cv1' }" input: tuple val(meta), path(vcf) @@ -27,14 +15,15 @@ process IMPUTEME_VCFTOPRS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ #!/usr/bin/env Rscript - #Set configuration - either from options.args or from defaults + #Set configuration - either from args or from defaults source("/imputeme/code/impute-me/functions.R") - if(file.exists('$options.args')){ - set_conf("set_from_file",'$options.args') + if(file.exists('$args')){ + set_conf("set_from_file",'$args') }else{ set_conf("set_from_file", "/imputeme/code/impute-me/template/nextflow_default_configuration.R") } diff --git a/modules/iqtree/functions.nf b/modules/iqtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/iqtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index bec879df..54a6486d 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IQTREE { tag "$alignment" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::iqtree=2.1.4_beta' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0" - } else { - container "quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0' : + 'quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0' }" input: path alignment @@ -27,20 +16,21 @@ process IQTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def fconst_args = constant_sites ? "-fconst $constant_sites" : '' def memory = task.memory.toString().replaceAll(' ', '') """ iqtree \\ $fconst_args \\ - $options.args \\ + $args \\ -s $alignment \\ -nt AUTO \\ -ntmax $task.cpus \\ -mem $memory \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') + "${task.process}": + iqtree: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') END_VERSIONS """ } diff --git a/modules/ismapper/functions.nf b/modules/ismapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ismapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map 
args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf index 20d3d5b7..4a33261b 100644 --- a/modules/ismapper/main.nf +++ b/modules/ismapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISMAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ismapper=2.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1' : + 'quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1' }" input: tuple val(meta), path(reads), path(reference), path(query) @@ -26,10 +15,11 @@ process ISMAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ismap \\ - $options.args \\ + $args \\ --t $task.cpus \\ --output_dir results \\ --queries $query \\ @@ -37,8 +27,8 @@ process ISMAPPER { --reads $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) + "${task.process}": + ismapper: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) END_VERSIONS """ } diff --git a/modules/isoseq3/cluster/functions.nf b/modules/isoseq3/cluster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/cluster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map 
args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index df005706..27d5c3d8 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_CLUSTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -27,28 +16,27 @@ process ISOSEQ3_CLUSTER { tuple val(meta), path("*.transcripts.cluster") , emit: cluster tuple val(meta), path("*.transcripts.cluster_report.csv"), emit: cluster_report tuple val(meta), path("*.transcripts.transcriptset.xml") , emit: transcriptset - path "versions.yml" , emit: versions - tuple val(meta), path("*.transcripts.hq.bam") , optional: true, emit: hq_bam tuple val(meta), path("*.transcripts.hq.bam.pbi") , optional: true, emit: hq_pbi tuple val(meta), path("*.transcripts.lq.bam") , optional: true, emit: lq_bam tuple val(meta), path("*.transcripts.lq.bam.pbi") , optional: true, emit: lq_pbi tuple val(meta), path("*.transcripts.singletons.bam") , optional: true, emit: singletons_bam tuple val(meta), path("*.transcripts.singletons.bam.pbi"), optional: true, emit: singletons_pbi - + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ isoseq3 \\ cluster \\ $bam \\ ${prefix}.transcripts.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - isoseq3 cluster: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) + "${task.process}": + isoseq3: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) END_VERSIONS """ } diff --git a/modules/isoseq3/refine/functions.nf b/modules/isoseq3/refine/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/refine/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf index 5a45eb2d..5bde2f8f 100644 --- a/modules/isoseq3/refine/main.nf +++ b/modules/isoseq3/refine/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_REFINE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -31,19 +20,20 @@ process ISOSEQ3_REFINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ isoseq3 \\ refine \\ -j $task.cpus \\ - $options.args \\ + $args \\ $bam \\ $primers \\ ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) + "${task.process}": + isoseq3: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) END_VERSIONS """ } diff --git a/modules/ivar/consensus/functions.nf b/modules/ivar/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { 
- def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 33fa11f7..4a657756 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,21 +18,23 @@ process IVAR_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" """ samtools mpileup \\ --reference $fasta \\ - $options.args2 \\ + $args2 \\ $bam | \\ $save_mpileup \\ ivar consensus \\ - $options.args \\ + $args \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/trim/functions.nf b/modules/ivar/trim/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/trim/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') 
-} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 6cf8171c..35798123 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_TRIM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,18 +17,19 @@ process IVAR_TRIM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ ivar trim \\ - $options.args \\ + $args \\ -i $bam \\ -b $bed \\ -p $prefix \\ > ${prefix}.ivar.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/variants/functions.nf b/modules/ivar/variants/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/variants/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) 
{ - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index d079a8e9..ba791307 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_VARIANTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,24 +18,26 @@ process IVAR_VARIANTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" def features = params.gff ? "-g $gff" : "" """ samtools mpileup \\ - $options.args2 \\ + $args2 \\ --reference $fasta \\ $bam | \\ $save_mpileup \\ ivar variants \\ - $options.args \\ + $args \\ $features \\ -r $fasta \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/jupyternotebook/functions.nf b/modules/jupyternotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/jupyternotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf index 2d8ad92f..02f1947f 100644 --- a/modules/jupyternotebook/main.nf +++ b/modules/jupyternotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' include { dump_params_yml; indent_code_block } from "./parametrize" -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true - process JUPYTERNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> 
saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //ipykernel, jupytext, papermill and nbconvert Python packages. conda (params.enable_conda ? "ipykernel=6.0.3 jupytext=1.11.4 nbconvert=6.1.0 papermill=2.3.3 matplotlib=3.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } else { - container "quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' : + 'quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' }" input: tuple val(meta), path(notebook) @@ -36,7 +23,11 @@ process JUPYTERNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -44,14 +35,14 @@ process JUPYTERNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -71,10 +62,10 @@ process JUPYTERNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" - export NUMBA_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" + export NUMBA_NUM_THREADS="$task.cpus" # Convert notebook to ipynb using jupytext, execute using papermill, convert using nbconvert jupytext --to notebook --output - --set-kernel - ${notebook} \\ @@ -82,7 +73,7 @@ process JUPYTERNOTEBOOK { | jupyter nbconvert --stdin --to html --output ${prefix}.html cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": jupytext: \$(jupytext --version) ipykernel: \$(python -c "import ipykernel; print(ipykernel.__version__)") nbconvert: \$(jupyter nbconvert --version) diff --git a/modules/kallisto/index/functions.nf b/modules/kallisto/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallisto/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from 
process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 96457b6d..4dc9c6d0 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTO_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::kallisto=0.46.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1" - } else { - container "quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1' : + 'quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1' }" input: path fasta @@ -26,16 +15,17 @@ process KALLISTO_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ kallisto \\ index \\ - $options.args \\ + $args \\ -i kallisto \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') + "${task.process}": + kallisto: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/functions.nf b/modules/kallistobustools/count/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/count/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 8c705e51..00ca8971 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_COUNT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) 
} conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: tuple val(meta), path(reads) @@ -24,7 +13,7 @@ process KALLISTOBUSTOOLS_COUNT { path t2g path t1c path t2c - val workflow + val workflow_mode val technology output: @@ -32,7 +21,8 @@ process KALLISTOBUSTOOLS_COUNT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? 
"-c2 $t2c" : '' """ @@ -43,16 +33,16 @@ process KALLISTOBUSTOOLS_COUNT { -g $t2g \\ $cdna \\ $introns \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ -x $technology \\ - $options.args \\ + $args \\ -o ${prefix}.count \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index bc2433bb..911697d2 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -39,9 +39,9 @@ input: type: file description: kb ref's c2 unspliced_t2c file pattern: "*.{introns_t2c.txt}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", "lamanno", "nucleus" pattern: "{standard,lamanno,nucleus,kite}" - technology: type: value diff --git a/modules/kallistobustools/ref/functions.nf b/modules/kallistobustools/ref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/ref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules 
-// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index a8287498..1e496f67 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_REF { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: path fasta path gtf - val workflow + val workflow_mode output: path "versions.yml" , emit: versions @@ -33,20 +22,21 @@ process KALLISTOBUSTOOLS_REF { path "intron_t2c.txt" , optional:true, emit: intron_t2c script: - if (workflow == "standard") { + def args = task.ext.args ?: '' + if (workflow_mode == "standard") { """ kb \\ ref \\ -i kb_ref_out.idx \\ -g t2g.txt \\ -f1 cdna.fa \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } else { @@ -59,13 +49,13 @@ process KALLISTOBUSTOOLS_REF { -f2 intron.fa \\ -c1 cdna_t2c.txt \\ -c2 intron_t2c.txt \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/ref/meta.yml b/modules/kallistobustools/ref/meta.yml index 353b9c11..dcc78c66 100644 --- a/modules/kallistobustools/ref/meta.yml +++ b/modules/kallistobustools/ref/meta.yml @@ -21,9 +21,9 @@ input: type: file description: Genomic gtf file pattern: "*.{gtf,gtf.gz}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", 
"lamanno", "nucleus" pattern: "{standard,lamanno,nucleus}" output: diff --git a/modules/khmer/normalizebymedian/functions.nf b/modules/khmer/normalizebymedian/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/khmer/normalizebymedian/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/khmer/normalizebymedian/main.nf b/modules/khmer/normalizebymedian/main.nf index 234d172b..50b3d603 100644 --- a/modules/khmer/normalizebymedian/main.nf +++ b/modules/khmer/normalizebymedian/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KHMER_NORMALIZEBYMEDIAN { tag "${name}" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::khmer=3.0.0a3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2" - } else { - container "quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2' : + 'quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2' }" input: path pe_reads @@ -28,22 +17,22 @@ process KHMER_NORMALIZEBYMEDIAN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' pe_args = pe_reads ? "--paired" : "" se_args = se_reads ? "--unpaired-reads ${se_reads}" : "" files = pe_reads ? pe_reads : se_reads - """ normalize-by-median.py \\ -M ${task.memory.toGiga()}e9 \\ - --gzip ${options.args} \\ + --gzip $args \\ -o ${name}.fastq.gz \\ - ${pe_args} \\ - ${se_args} \\ - ${files} + $pe_args \\ + $se_args \\ + $files cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) + "${task.process}": + khmer: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) END_VERSIONS """ } diff --git a/modules/kleborate/functions.nf b/modules/kleborate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kleborate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index 5bb76ad0..b64a0c45 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KLEBORATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::kleborate=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1' : + 'quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1' }" input: tuple val(meta), path(fastas) @@ -26,16 +15,17 @@ process KLEBORATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ kleborate \\ - $options.args \\ + $args \\ --outfile ${prefix}.results.txt \\ --assemblies *.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) + "${task.process}": + kleborate: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/functions.nf b/modules/kraken2/kraken2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kraken2/kraken2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index 0d4e5840..e5fb4b80 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KRAKEN2_KRAKEN2 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::kraken2=2.1.1 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' : + 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process KRAKEN2_KRAKEN2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? 
"${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" @@ -42,14 +32,14 @@ process KRAKEN2_KRAKEN2 { --report ${prefix}.kraken2.report.txt \\ --gzip-compressed \\ $paired \\ - $options.args \\ + $args \\ $reads pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') + "${task.process}": + kraken2: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/krona/kronadb/main.nf b/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ca7fc3d3 --- /dev/null +++ b/modules/krona/kronadb/main.nf @@ -0,0 +1,27 @@ +def VERSION='2.7.1' // Version information not provided by tool on CLI + +process KRONA_KRONADB { + label 'process_low' + + conda (params.enable_conda ? "bioconda::krona=2.7.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5' : + 'quay.io/biocontainers/krona:2.7.1--pl526_5' }" + + input: + + output: + path 'taxonomy/taxonomy.tab', emit: db + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktUpdateTaxonomy.sh taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/kronadb/meta.yml b/modules/krona/kronadb/meta.yml similarity index 95% rename from modules/kronatools/kronadb/meta.yml rename to modules/krona/kronadb/meta.yml index 5a637949..2a12aaaf 100644 --- a/modules/kronatools/kronadb/meta.yml +++ b/modules/krona/kronadb/meta.yml @@ -1,11 +1,11 @@ -name: kronatools_kronadb +name: krona_kronadb description: KronaTools Update Taxonomy downloads a taxonomy database keywords: - database - taxonomy - krona tools: - - kronatools: + - krona: description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. homepage: https://github.com/marbl/Krona/wiki/KronaTools documentation: https://github.com/marbl/Krona/wiki/Installing diff --git a/modules/krona/ktimporttaxonomy/main.nf b/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..bc79c98c --- /dev/null +++ b/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,30 @@ +def VERSION = '2.8' // Version information not provided by tool on CLI + +process KRONA_KTIMPORTTAXONOMY { + tag "${meta.id}" + label 'process_high' + + conda (params.enable_conda ? "bioconda::krona=2.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2' : + 'quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2' }" + + input: + tuple val(meta), path(report) + path "taxonomy/taxonomy.tab" + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktImportTaxonomy "$report" -tax taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/kronatools/ktimporttaxonomy/meta.yml b/modules/krona/ktimporttaxonomy/meta.yml similarity index 95% rename from modules/kronatools/ktimporttaxonomy/meta.yml rename to modules/krona/ktimporttaxonomy/meta.yml index f37f2db4..b65919f8 100644 --- a/modules/kronatools/ktimporttaxonomy/meta.yml +++ b/modules/krona/ktimporttaxonomy/meta.yml @@ -1,4 +1,4 @@ -name: kronatools_ktimporttaxonomy +name: krona_ktimporttaxonomy description: KronaTools Import Taxonomy imports taxonomy classifications and produces an interactive Krona plot. keywords: - plot @@ -8,7 +8,7 @@ keywords: - visualisation - krona chart tools: - - kronatools: + - krona: description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. homepage: https://github.com/marbl/Krona/wiki/KronaTools documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html @@ -24,7 +24,7 @@ input: e.g. [ id:'test'] - database: type: path - description: "Path to the taxonomy database downloaded by kronatools/kronadb" + description: "Path to the taxonomy database downloaded by krona/kronadb" - report: type: file description: "A tab-delimited file with taxonomy IDs and (optionally) query IDs, magnitudes, and scores. Query IDs are taken from column 1, taxonomy IDs from column 2, and scores from column 3. Lines beginning with # will be ignored." 
diff --git a/modules/kronatools/kronadb/functions.nf b/modules/kronatools/kronadb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kronatools/kronadb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kronatools/kronadb/main.nf b/modules/kronatools/kronadb/main.nf deleted file mode 100644 index 7dee12d0..00000000 --- a/modules/kronatools/kronadb/main.nf +++ /dev/null @@ -1,35 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process KRONATOOLS_KRONADB { - label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? 
"bioconda::krona=2.7.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5" - } else { - container "quay.io/biocontainers/krona:2.7.1--pl526_5" - } - input: - - output: - path 'taxonomy/taxonomy.tab', emit: db - path "versions.yml" , emit: versions - - script: - def VERSION='2.7.1' - """ - ktUpdateTaxonomy.sh taxonomy - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION - END_VERSIONS - """ -} diff --git a/modules/kronatools/ktimporttaxonomy/functions.nf b/modules/kronatools/ktimporttaxonomy/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kronatools/ktimporttaxonomy/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", 
"") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kronatools/ktimporttaxonomy/main.nf b/modules/kronatools/ktimporttaxonomy/main.nf deleted file mode 100644 index 893bc5b2..00000000 --- a/modules/kronatools/ktimporttaxonomy/main.nf +++ /dev/null @@ -1,39 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process KRONATOOLS_KTIMPORTTAXONOMY { - tag "${meta.id}" - label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda 
(params.enable_conda ? "bioconda::krona=2.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2" - } else { - container "quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2" - } - - input: - tuple val(meta), path(report) - path "taxonomy/taxonomy.tab" - - output: - tuple val(meta), path ('*.html'), emit: html - path "versions.yml" , emit: versions - - script: - def VERSION='2.8' - """ - ktImportTaxonomy "$report" -tax taxonomy - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION - END_VERSIONS - """ -} diff --git a/modules/last/dotplot/functions.nf b/modules/last/dotplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/dotplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty 
entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index d02e98ad..51667378 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_DOTPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda 
(params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -28,17 +17,18 @@ process LAST_DOTPLOT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ last-dotplot \\ - $options.args \\ + $args \\ $maf \\ $prefix.$format # last-dotplot has no --version option so let's use lastal from the same suite cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastal/functions.nf b/modules/last/lastal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - 
def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index c4335f25..4b90a965 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_LASTAL { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx), path (param_file) @@ -27,13 +16,14 @@ process LAST_LASTAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def trained_params = param_file ? 
"-p ${param_file}" : '' """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) lastal \\ $trained_params \\ - $options.args \\ + $args \\ -P $task.cpus \\ ${index}/\$INDEX_NAME \\ $fastx \\ @@ -42,8 +32,8 @@ process LAST_LASTAL { # which makes its checksum non-reproducible. cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastdb/functions.nf b/modules/last/lastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index fb765ada..ff6485dc 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_LASTDB { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -26,18 +15,19 @@ process LAST_LASTDB { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir lastdb lastdb \\ - $options.args \\ + $args \\ -P $task.cpus \\ lastdb/${prefix} \\ $fastx cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafconvert/functions.nf b/modules/last/mafconvert/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafconvert/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' 
- options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index 5e259109..f1a7312e 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFCONVERT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -35,15 +24,16 @@ process LAST_MAFCONVERT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - maf-convert $options.args $format $maf | gzip --no-name \\ + maf-convert $args $format $maf | gzip --no-name \\ > ${prefix}.${format}.gz # maf-convert has no --version option but lastdb (part of the same package) has. cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafswap/functions.nf b/modules/last/mafswap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafswap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index 5ce38c92..c66e47d4 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFSWAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,14 +15,15 @@ process LAST_MAFSWAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - maf-swap $options.args $maf | gzip --no-name > ${prefix}.swapped.maf.gz + maf-swap $args $maf | gzip --no-name > ${prefix}.swapped.maf.gz # maf-swap has no --version option but lastdb, part of the same package, has. 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/postmask/functions.nf b/modules/last/postmask/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/postmask/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index 3102fbe6..e4f4390a 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_POSTMASK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,15 +15,16 @@ process LAST_POSTMASK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ - last-postmask $options.args $maf | gzip --no-name > ${prefix}.maf.gz + last-postmask $args $maf | gzip --no-name > ${prefix}.maf.gz # last-postmask does not have a --version option cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/split/functions.nf b/modules/last/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files 
= args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index 2a9e5621..ecc47e80 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_SPLIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,13 +15,14 @@ process LAST_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - zcat < $maf | last-split $options.args | gzip --no-name > ${prefix}.maf.gz + zcat < $maf | last-split $args | gzip --no-name > ${prefix}.maf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(last-split --version 2>&1 | sed 's/last-split //') + "${task.process}": + last: \$(last-split --version 2>&1 | sed 's/last-split //') END_VERSIONS """ } diff --git a/modules/last/train/functions.nf b/modules/last/train/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/train/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index f0b958bc..0a949857 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_TRAIN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -27,20 +16,21 @@ process LAST_TRAIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) last-train \\ - $options.args \\ + $args \\ -P $task.cpus \\ ${index}/\$INDEX_NAME \\ $fastx \\ > ${prefix}.\$INDEX_NAME.par cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/leehom/functions.nf b/modules/leehom/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/leehom/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options 
= [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf index e0d9ee39..d997e68b 100644 --- a/modules/leehom/main.nf +++ b/modules/leehom/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION="1.2.15" +def VERSION = '1.2.15' // Version information not provided by tool on CLI process LEEHOM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::leehom=1.2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1" - } else { - container "quay.io/biocontainers/leehom:1.2.15--h29e30f7_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1' : + 'quay.io/biocontainers/leehom:1.2.15--h29e30f7_1' }" input: tuple val(meta), path(reads) @@ -32,54 +21,54 @@ process LEEHOM { tuple val(meta), path("${prefix}_r2.fq.gz") , optional: true, emit: unmerged_r2_fq_pass tuple val(meta), path("${prefix}_r2.fail.fq.gz"), optional: true, emit: unmerged_r2_fq_fail tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" - if ( reads.toString().endsWith('.bam') ) { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -o ${prefix}.bam \\ - --log ${prefix}.log \\ - $reads + if (reads.toString().endsWith('.bam')) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -o ${prefix}.bam \\ + --log ${prefix}.log \\ + $reads - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ - } else if ( meta.single_end ) { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -fq1 $reads \\ - -fqo ${prefix} \\ - --log ${prefix}.log + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ + } else if (meta.single_end) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 $reads \\ + -fqo $prefix \\ + --log ${prefix}.log - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ } else { - """ - leeHom \\ - $options.args \\ - -t $task.cpus \\ - -fq1 ${reads[0]} \\ - -fq2 ${reads[1]} \\ - -fqo ${prefix} \\ - --log ${prefix}.log + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 ${reads[0]} \\ + -fq2 ${reads[1]} \\ + -fqo $prefix \\ 
+ --log ${prefix}.log - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) - END_VERSIONS - """ + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ } } diff --git a/modules/lib/functions.nf b/modules/lib/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lib/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/functions.nf b/modules/lima/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lima/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return 
a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 16525953..64f6d87d 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LIMA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lima=2.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/lima:2.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0' : + 'quay.io/biocontainers/lima:2.2.0--h9ee0642_0' }" input: tuple val(meta), path(ccs) @@ -40,7 +29,8 @@ process LIMA { tuple val(meta), path("*.json") , optional: true, emit: json script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ OUT_EXT="" @@ -62,11 +52,11 @@ process LIMA { $primers \\ $prefix.\$OUT_EXT \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) + "${task.process}": + lima: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) END_VERSIONS """ } diff --git a/modules/lissero/functions.nf b/modules/lissero/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lissero/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf index ff863aaa..b5cd2b68 100644 --- a/modules/lissero/main.nf +++ b/modules/lissero/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LISSERO { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::lissero=0.4.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0" - } else { - container "quay.io/biocontainers/lissero:0.4.9--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0' : + 'quay.io/biocontainers/lissero:0.4.9--py_0' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process LISSERO { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ lissero \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) + "${task.process}": + lissero: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) END_VERSIONS """ } diff --git a/modules/lofreq/call/functions.nf b/modules/lofreq/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index e77d7a78..74995152 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -27,18 +16,19 @@ process LOFREQ_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ call \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/callparallel/functions.nf b/modules/lofreq/callparallel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/callparallel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index a86748d7..63ae2886 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALLPARALLEL { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam), path(bai) @@ -28,19 +17,20 @@ process LOFREQ_CALLPARALLEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ call-parallel \\ --pp-threads $task.cpus \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/filter/functions.nf b/modules/lofreq/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 905a961d..6f13ae44 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(vcf) @@ -26,17 +15,18 @@ process LOFREQ_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq \\ filter \\ - $options.args \\ + $args \\ -i $vcf \\ -o ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/indelqual/functions.nf b/modules/lofreq/indelqual/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/indelqual/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index b33a1e04..bf04c5d2 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_INDELQUAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -26,17 +16,18 @@ process LOFREQ_INDELQUAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ lofreq indelqual \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.bam \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/macs2/callpeak/functions.nf b/modules/macs2/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/macs2/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index 94f8945b..e8bfcda0 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MACS2_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::macs2=2.2.7.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3" - } else { - container "quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3' : + 'quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3' }" input: tuple val(meta), path(ipbam), path(controlbam) @@ -32,20 +21,21 @@ process MACS2_CALLPEAK { tuple val(meta), path("*.bdg") , optional:true, emit: bdg script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() - def format = meta.single_end ? 
'BAM' : 'BAMPE' - def control = controlbam ? "--control $controlbam" : '' - if(args.contains('--format')){ - def id = args.findIndexOf{it=='--format'} - format = args[id+1] - args.remove(id+1) - args.remove(id) + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() + def format = meta.single_end ? 'BAM' : 'BAMPE' + def control = controlbam ? "--control $controlbam" : '' + if(args_list.contains('--format')){ + def id = args_list.findIndexOf{it=='--format'} + format = args_list[id+1] + args_list.remove(id+1) + args_list.remove(id) } """ macs2 \\ callpeak \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ --gsize $macs2_gsize \\ --format $format \\ --name $prefix \\ @@ -53,8 +43,8 @@ process MACS2_CALLPEAK { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(macs2 --version | sed -e "s/macs2 //g") + "${task.process}": + macs2: \$(macs2 --version | sed -e "s/macs2 //g") END_VERSIONS """ } diff --git a/modules/malt/build/functions.nf b/modules/malt/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: 
[] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index 48259a50..d1b0c427 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_BUILD { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastas @@ -30,6 +19,7 @@ process MALT_BUILD { path "malt-build.log", emit: log script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_BUILD] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
@@ -46,13 +36,13 @@ process MALT_BUILD { -s $seq_type \\ $igff \\ -d 'malt_index/' \\ - -t ${task.cpus} \\ - $options.args \\ + -t $task.cpus \\ + $args \\ -mdb ${map_db}/*.db |&tee malt-build.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) + "${task.process}": + malt: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) END_VERSIONS """ } diff --git a/modules/malt/run/functions.nf b/modules/malt/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index bc78de8c..8b8f05cc 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_RUN { label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastqs @@ -30,6 +19,7 @@ process MALT_RUN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' @@ -40,17 +30,17 @@ process MALT_RUN { """ malt-run \\ -J-Xmx${avail_mem}g \\ - -t ${task.cpus} \\ + -t $task.cpus \\ -v \\ -o . \\ - $options.args \\ + $args \\ --inFile ${fastqs.join(' ')} \\ -m $mode \\ --index $index/ |&tee malt-run.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') + "${task.process}": + malt: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/maltextract/functions.nf b/modules/maltextract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/maltextract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf index d909ec96..e3a42016 100644 --- a/modules/maltextract/main.nf +++ b/modules/maltextract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALTEXTRACT { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path rma6 @@ -28,6 +17,7 @@ process MALTEXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MaltExtract \\ -Xmx${task.memory.toGiga()}g \\ @@ -36,11 +26,11 @@ process MALTEXTRACT { -t $taxon_list \\ -r $ncbi_dir \\ -o results/ \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') + "${task.process}": + maltextract: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') END_VERSIONS """ } diff --git a/modules/manta/germline/functions.nf b/modules/manta/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index f957a7ec..553f0be9 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), 
meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_GERMLINE { mv manta/results/variants/diploidSV.vcf.gz.tbi \ ${prefix}.diploid_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/somatic/functions.nf b/modules/manta/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index f912d478..38d73133 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) @@ -37,7 +26,8 @@ process MANTA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ @@ -60,8 +50,8 @@ process MANTA_SOMATIC { mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/tumoronly/functions.nf b/modules/manta/tumoronly/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/tumoronly/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index f20e8128..dc72fcc4 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_TUMORONLY { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_TUMORONLY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_TUMORONLY { mv manta/results/variants/tumorSV.vcf.gz.tbi \ ${prefix}.tumor_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/mapdamage2/functions.nf b/modules/mapdamage2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mapdamage2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf index e252e27c..3673970e 100644 --- a/modules/mapdamage2/main.nf +++ b/modules/mapdamage2/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MAPDAMAGE2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mapdamage2=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0" - } else { - container "quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0' : + 'quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0' }" input: tuple val(meta), path(bam) @@ -43,16 +33,17 @@ process MAPDAMAGE2 { path "versions.yml",emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mapDamage \\ - $options.args \\ + $args \\ -i $bam \\ -r $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(mapDamage --version)) + "${task.process}": + mapdamage2: \$(echo \$(mapDamage --version)) END_VERSIONS """ } diff --git a/modules/mash/sketch/functions.nf b/modules/mash/sketch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mash/sketch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless 
running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index f434a5f1..0c0b6e17 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -1,20 +1,10 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MASH_SKETCH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mash=2.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1" - } else { - container "quay.io/biocontainers/mash:2.3--he348c14_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1' : + 'quay.io/biocontainers/mash:2.3--he348c14_1' }" input: tuple val(meta), path(reads) @@ -25,19 +15,20 @@ process MASH_SKETCH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mash \\ sketch \\ - $options.args \\ + $args \\ -p $task.cpus \\ -o ${prefix} \\ -r $reads \\ 2> ${prefix}.mash_stats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mash --version 2>&1) + "${task.process}": + mash: \$(mash --version 2>&1) END_VERSIONS """ } diff --git a/modules/mashtree/functions.nf b/modules/mashtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mashtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf index db0b14f5..6728e3ce 100644 --- a/modules/mashtree/main.nf +++ b/modules/mashtree/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MASHTREE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mashtree=1.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0" - } else { - container "quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0' : + 'quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0' }" input: tuple val(meta), path(seqs) @@ -27,18 +16,19 @@ process MASHTREE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mashtree \\ - $options.args \\ + $args \\ --numcpus $task.cpus \\ --outmatrix ${prefix}.tsv \\ --outtree ${prefix}.dnd \\ $seqs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' ) + "${task.process}": + mashtree: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' ) END_VERSIONS """ } diff --git a/modules/maxbin2/functions.nf b/modules/maxbin2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/maxbin2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf index bcfa9590..e13af704 100644 --- a/modules/maxbin2/main.nf +++ b/modules/maxbin2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MAXBIN2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::maxbin2=2.2.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2" - } else { - container "quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2' : + 'quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2' }" input: tuple val(meta), path(contigs), path(reads), path(abund) @@ -33,20 +22,21 @@ process MAXBIN2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def associate_files = reads ? 
"-reads $reads" : "-abund $abund" """ run_MaxBin.pl \\ -contig $contigs \\ $associate_files \\ -thread $task.cpus \\ - $options.args \\ + $args \\ -out $prefix gzip *.fasta *.noclass *.tooshort *log *.marker cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": maxbin2: \$( run_MaxBin.pl -v | head -n 1 | sed 's/MaxBin //' ) END_VERSIONS """ diff --git a/modules/medaka/functions.nf b/modules/medaka/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/medaka/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not 
publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf index a0db4150..e7a8b9cc 100644 --- a/modules/medaka/main.nf +++ b/modules/medaka/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MEDAKA { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::medaka=1.4.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0" - } else { - container "quay.io/biocontainers/medaka:1.4.4--py38h130def0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0' : + 'quay.io/biocontainers/medaka:1.4.4--py38h130def0_0' }" input: tuple val(meta), path(reads), path(assembly) @@ -26,11 +15,12 @@ process MEDAKA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ medaka_consensus \\ -t $task.cpus \\ - $options.args \\ + $args \\ -i $reads \\ -d $assembly \\ -o ./ @@ -40,8 +30,8 @@ process MEDAKA { gzip -n ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( medaka --version 2>&1 | sed 's/medaka //g' ) + "${task.process}": + medaka: \$( medaka --version 2>&1 | sed 's/medaka //g' ) END_VERSIONS """ } diff --git a/modules/megahit/functions.nf b/modules/megahit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/megahit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules 
-// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf index 8c8a5555..011fa7d3 100644 --- a/modules/megahit/main.nf +++ b/modules/megahit/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MEGAHIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::megahit=1.2.9 conda-forge::pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } else { - container "quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' : + 'quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' }" input: tuple val(meta), path(reads) @@ -30,25 +19,27 @@ process MEGAHIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ megahit \\ -r ${reads} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } else { @@ -57,19 +48,19 @@ process MEGAHIT { -1 ${reads[0]} \\ -2 ${reads[1]} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } diff --git a/modules/meningotype/functions.nf b/modules/meningotype/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/meningotype/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process 
-// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf index 4e779e8c..5dde5633 100644 --- a/modules/meningotype/main.nf +++ b/modules/meningotype/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MENINGOTYPE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::meningotype=0.8.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0' : + 'quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process MENINGOTYPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ meningotype \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) + "${task.process}": + meningotype: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) END_VERSIONS """ } diff --git a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf b/modules/metabat2/jgisummarizebamcontigdepths/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metabat2/jgisummarizebamcontigdepths/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf index 1860ae16..e35d6715 100644 --- a/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::metabat2=2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" - } else { - container "quay.io/biocontainers/metabat2:2.15--h986a166_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" input: tuple val(meta), path(bam), path(bai) @@ -25,20 +15,21 @@ process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ export OMP_NUM_THREADS=$task.cpus jgi_summarize_bam_contig_depths \\ --outputDepth ${prefix}.txt \\ - $options.args \\ + $args \\ $bam bgzip --threads $task.cpus ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) END_VERSIONS """ } diff --git a/modules/metabat2/metabat2/functions.nf b/modules/metabat2/metabat2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metabat2/metabat2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index 589e268c..d158af91 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METABAT2_METABAT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1" - } else { - container "quay.io/biocontainers/metabat2:2.15--h986a166_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" input: tuple val(meta), path(fasta), path(depth) @@ -26,14 +16,15 @@ process METABAT2_METABAT2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def decompress_depth = depth ? 
"gzip -d -f $depth" : "" def depth_file = depth ? "-a ${depth.baseName}" : "" """ $decompress_depth metabat2 \\ - $options.args \\ + $args \\ -i $fasta \\ $depth_file \\ -t $task.cpus \\ @@ -46,8 +37,8 @@ process METABAT2_METABAT2 { bgzip --threads $task.cpus bins/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) END_VERSIONS """ } diff --git a/modules/metaphlan3/functions.nf b/modules/metaphlan3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metaphlan3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and 
trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index c5157b66..9463da6f 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METAPHLAN3 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::metaphlan=3.0.12' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0" - } else { - container "quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0' : + 'quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0' }" input: tuple val(meta), path(input) @@ -29,7 +18,8 @@ process METAPHLAN3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? 
'' : "--bowtie2out ${prefix}.bowtie2out.txt" @@ -39,14 +29,14 @@ process METAPHLAN3 { --nproc $task.cpus \\ $input_type \\ $input_data \\ - $options.args \\ + $args \\ $bowtie2_out \\ --bowtie2db ${metaphlan_db} \\ --biom ${prefix}.biom \\ --output_file ${prefix}_profile.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(metaphlan --version 2>&1 | awk '{print \$3}') + "${task.process}": + metaphlan3: \$(metaphlan --version 2>&1 | awk '{print \$3}') END_VERSIONS """ } diff --git a/modules/methyldackel/extract/functions.nf b/modules/methyldackel/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index 94e4b379..a39c0305 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_EXTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,15 +17,16 @@ process METHYLDACKEL_EXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MethylDackel extract \\ - $options.args \\ + $args \\ $fasta \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/methyldackel/mbias/functions.nf b/modules/methyldackel/mbias/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/mbias/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index c8fd2fa2..1b4b14c4 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_MBIAS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,10 +17,11 @@ process METHYLDACKEL_MBIAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ MethylDackel mbias \\ - $options.args \\ + $args \\ $fasta \\ $bam \\ $prefix \\ @@ -39,8 +29,8 @@ process METHYLDACKEL_MBIAS { > ${prefix}.mbias.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/minia/functions.nf b/modules/minia/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minia/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 140ef9e7..8516ef6e 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIA { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::minia=3.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0" - } else { - container "quay.io/biocontainers/minia:3.2.4--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0' : + 'quay.io/biocontainers/minia:3.2.4--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -28,19 +17,20 @@ process MINIA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def read_list = reads.join(",") """ echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt minia \\ - $options.args \\ + $args \\ -nb-cores $task.cpus \\ -in input_files.txt \\ -out $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') + "${task.process}": + minia: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') END_VERSIONS """ } diff --git a/modules/miniasm/functions.nf b/modules/miniasm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/miniasm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy 
Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf index d2652fab..35c2e2c0 100644 --- a/modules/miniasm/main.nf +++ b/modules/miniasm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIASM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::miniasm=0.3_r179" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2" - } else { - container "quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2' : + 'quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2' }" input: tuple val(meta), path(reads), path(paf) @@ -27,10 +16,11 @@ process MINIASM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ miniasm \\ - $options.args \\ + $args \\ -f $reads \\ $paf > \\ ${prefix}.gfa @@ -41,8 +31,8 @@ process MINIASM { gzip -n ${prefix}.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( miniasm -V 2>&1 ) + "${task.process}": + miniasm: \$( miniasm -V 2>&1 ) END_VERSIONS """ } diff --git a/modules/minimap2/align/functions.nf b/modules/minimap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: 
args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index 215e4fb5..c6c0c316 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process MINIMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" """ minimap2 \\ - $options.args \\ + $args \\ -t $task.cpus \\ $reference \\ $input_reads \\ > ${prefix}.paf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/minimap2/index/functions.nf b/modules/minimap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index b154a649..10cdd142 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: path fasta @@ -25,16 +14,17 @@ process MINIMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ minimap2 \\ -t $task.cpus \\ -d ${fasta.baseName}.mmi \\ - $options.args \\ + $args \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/mlst/functions.nf b/modules/mlst/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mlst/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf index faac9871..aa338420 100644 --- a/modules/mlst/main.nf +++ b/modules/mlst/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MLST { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mlst=2.19.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/mlst:2.19.0--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1' : + 'quay.io/biocontainers/mlst:2.19.0--hdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,7 +15,8 @@ process MLST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mlst \\ --threads $task.cpus \\ @@ -34,8 +24,8 @@ process MLST { > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) + "${task.process}": + mlst: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) END_VERSIONS """ diff --git a/modules/mosdepth/functions.nf b/modules/mosdepth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mosdepth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = 
args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index 8fe3cfee..b25e6a3d 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MOSDEPTH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::mosdepth=0.3.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0" - } else { - container "quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0' : + 'quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0' }" input: tuple val(meta), path(bam), path(bai) @@ -34,17 +23,18 @@ process MOSDEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def interval = window_size ? 
"--by ${window_size}" : "--by ${bed}" """ mosdepth \\ $interval \\ - $options.args \\ + $args \\ $prefix \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') + "${task.process}": + mosdepth: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/msisensor/msi/functions.nf b/modules/msisensor/msi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/msi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index bd5a0a0e..1eb510a1 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_MSI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(normal_bam), path(normal_bai), path(tumor_bam), path(tumor_bai), val(metascan), path(homopolymers) @@ -29,7 +18,8 @@ process MSISENSOR_MSI { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ msisensor \\ msi \\ @@ -37,11 +27,11 @@ process MSISENSOR_MSI { -n $normal_bam \\ -t $tumor_bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/msisensor/scan/functions.nf b/modules/msisensor/scan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/scan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// 
Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index ebd8785a..2419a0a1 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_SCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process MSISENSOR_SCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ msisensor \\ scan \\ -d $fasta \\ -o ${prefix}.msisensor_scan.tab \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/mtnucratio/functions.nf b/modules/mtnucratio/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mtnucratio/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf index 28d08a13..b8663469 100644 --- a/modules/mtnucratio/main.nf +++ b/modules/mtnucratio/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MTNUCRATIO { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mtnucratio=0.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2" - } else { - container "quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2' : + 'quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -27,17 +17,18 @@ process MTNUCRATIO { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ mtnucratio \\ - $options.args \\ + $args \\ $bam \\ $mt_id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') + "${task.process}": + mtnucratio: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') END_VERSIONS """ } diff --git a/modules/multiqc/functions.nf b/modules/multiqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/multiqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args 
= args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 0861aa59..3dceb162 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MULTIQC { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::multiqc=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path multiqc_files @@ -27,12 +16,13 @@ process MULTIQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - multiqc -f $options.args . + multiqc -f $args . 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) END_VERSIONS """ } diff --git a/modules/mummer/functions.nf b/modules/mummer/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mummer/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf index e46fd799..f4f3bb18 100644 --- a/modules/mummer/main.nf +++ b/modules/mummer/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '3.23' +def VERSION = '3.23' // Version information not provided by tool on CLI process MUMMER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mummer=3.23" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" - } else { - container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" input: tuple val(meta), path(ref), path(query) @@ -28,7 +17,8 @@ process MUMMER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false def fasta_name_ref = ref.getName().replace(".gz", "") @@ -42,14 +32,14 @@ process MUMMER { gzip -c -d $query > $fasta_name_query fi mummer \\ - $options.args \\ + $args \\ $fasta_name_ref \\ $fasta_name_query \\ > ${prefix}.coords cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo $VERSION ) + "${task.process}": + mummer: $VERSION END_VERSIONS """ } diff --git a/modules/muscle/functions.nf b/modules/muscle/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/muscle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index 6ffb97ac..a50f5cb3 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MUSCLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::muscle=3.8.1551" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6" - } else { - container "quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6' : + 'quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6' }" input: tuple val(meta), path(fasta) @@ -33,18 +22,18 @@ process MUSCLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def fasta_out = options.args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' - def clw_out = options.args.contains('-clw') ? 
"-clwout ${prefix}_muscle_msa.clw" : '' - def msf_out = options.args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' - def phys_out = options.args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' - def phyi_out = options.args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' - def html_out = options.args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' - def tree_out = options.args.contains('-maketree') ? "-out ${prefix}_muscle_msa.tree" : '' - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def fasta_out = args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' + def clw_out = args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' + def msf_out = args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' + def phys_out = args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' + def phyi_out = args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' + def html_out = args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' + def tree_out = args.contains('-maketree') ? 
"-out ${prefix}_muscle_msa.tree" : '' """ muscle \\ - $options.args \\ + $args \\ -in $fasta \\ $fasta_out \\ $clw_out \\ @@ -55,8 +44,8 @@ process MUSCLE { $tree_out \\ -loga muscle_msa.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') + "${task.process}": + muscle: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') END_VERSIONS """ } diff --git a/modules/nanolyse/functions.nf b/modules/nanolyse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanolyse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index 271592f7..f29eeb77 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOLYSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::nanolyse=1.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0" - } else { - container "quay.io/biocontainers/nanolyse:1.2.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0' : + 'quay.io/biocontainers/nanolyse:1.2.0--py_0' }" input: tuple val(meta), path(fastq) @@ -28,14 +17,15 @@ process NANOLYSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") + "${task.process}": + nanolyse: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") END_VERSIONS """ } diff --git a/modules/nanoplot/functions.nf b/modules/nanoplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanoplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args 
?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index 16e2248c..36577d8a 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::nanoplot=1.38.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0' : + 'quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0' }" input: tuple val(meta), path(ontfile) @@ -29,16 +18,17 @@ process NANOPLOT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def input_file = ("$ontfile".endsWith(".fastq.gz")) ? "--fastq ${ontfile}" : ("$ontfile".endsWith(".txt")) ? 
"--summary ${ontfile}" : '' """ NanoPlot \\ - $options.args \\ + $args \\ -t $task.cpus \\ $input_file cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') + "${task.process}": + nanoplot: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ncbigenomedownload/functions.nf b/modules/ncbigenomedownload/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ncbigenomedownload/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list 
= [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf index ffa53871..466c8d09 100644 --- a/modules/ncbigenomedownload/main.nf +++ b/modules/ncbigenomedownload/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NCBIGENOMEDOWNLOAD { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ncbi-genome-download=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1" - } else { - container "quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1' : + 'quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1' }" input: val meta @@ -39,18 +28,19 @@ process NCBIGENOMEDOWNLOAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def accessions_opt = accessions ? "-A ${accessions}" : "" """ ncbi-genome-download \\ - $options.args \\ + $args \\ $accessions_opt \\ --output-folder ./ \\ --flat-output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( ncbi-genome-download --version ) + "${task.process}": + ncbigenomedownload: \$( ncbi-genome-download --version ) END_VERSIONS """ } diff --git a/modules/nextclade/functions.nf b/modules/nextclade/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/nextclade/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 6fc6efc4..317d393d 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NEXTCLADE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::nextclade_js=0.14.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0' : + 'quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0' }" input: tuple val(meta), path(fasta) @@ -30,10 +19,11 @@ process NEXTCLADE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ nextclade \\ - $options.args \\ + $args \\ --jobs $task.cpus \\ --input-fasta $fasta \\ --output-json ${prefix}.json \\ @@ -43,8 +33,8 @@ process NEXTCLADE { --output-tree ${prefix}.tree.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(nextclade --version 2>&1) + "${task.process}": + nextclade: \$(nextclade --version 2>&1) END_VERSIONS """ } diff --git a/modules/ngmaster/functions.nf b/modules/ngmaster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ngmaster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf index 1897b5f3..0884b55c 100644 --- a/modules/ngmaster/main.nf +++ b/modules/ngmaster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NGMASTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ngmaster=0.5.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1' : + 'quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,16 +15,17 @@ process NGMASTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ ngmaster \\ - $options.args \\ + $args \\ $fasta \\ > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) + "${task.process}": + ngmaster: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) END_VERSIONS """ } diff --git a/modules/nucmer/functions.nf b/modules/nucmer/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nucmer/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf index 49a275f4..bb5dcb7d 100644 --- a/modules/nucmer/main.nf +++ b/modules/nucmer/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NUCMER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mummer=3.23" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12" - } else { - container "quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" input: tuple val(meta), path(ref), path(query) @@ -27,7 +16,8 @@ process NUCMER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? 
true : false def is_compressed_query = query.getName().endsWith(".gz") ? true : false def fasta_name_ref = ref.getName().replace(".gz", "") @@ -43,13 +33,13 @@ process NUCMER { nucmer \\ -p $prefix \\ --coords \\ - $options.args \\ + $args \\ $fasta_name_ref \\ $fasta_name_query cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) + "${task.process}": + nucmer: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) END_VERSIONS """ } diff --git a/modules/optitype/functions.nf b/modules/optitype/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/optitype/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 083b03a7..24be66a7 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process OPTITYPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::optitype=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/optitype:1.3.5--0" - } else { - container "quay.io/biocontainers/optitype:1.3.5--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/optitype:1.3.5--0' : + 'quay.io/biocontainers/optitype:1.3.5--0' }" input: tuple val(meta), path(bam) @@ -26,30 +15,32 @@ process OPTITYPE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - # Create a config for OptiType on a per sample basis with options.args2 + # Create a config for OptiType on a per sample basis with task.ext.args2 #Doing it old school now echo "[mapping]" > config.ini echo "razers3=razers3" >> config.ini echo "threads=$task.cpus" >> config.ini echo "[ilp]" >> config.ini - echo "$options.args2" >> config.ini + echo "$args2" >> config.ini echo "threads=1" >> config.ini echo "[behavior]" >> config.ini echo "deletebam=true" >> config.ini echo "unpaired_weight=0" >> config.ini echo "use_discordant=false" >> config.ini - # Run the actual OptiType typing with options.args - OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $options.args --prefix $prefix --outdir $prefix + # Run the actual OptiType typing with args + OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $args --prefix $prefix --outdir $prefix #Couldn't find a nicer way of doing this cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") + "${task.process}": + optitype: \$(cat \$(which 
OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") END_VERSIONS """ } diff --git a/modules/pairix/functions.nf b/modules/pairix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index 4bfd3b0d..c1b9658c 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairix=0.3.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3" - } else { - container "quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3' : + 'quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3' }" input: tuple val(meta), path(pair) @@ -26,14 +15,15 @@ process PAIRIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pairix \\ - $options.args \\ + $args \\ $pair cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') + "${task.process}": + pairix: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/pairtools/dedup/functions.nf b/modules/pairtools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index eabf24dd..5ee9dc43 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_DEDUP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools dedup \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ --output-stats ${prefix}.pairs.stat \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/flip/functions.nf b/modules/pairtools/flip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/flip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 50cfdfd2..452800cc 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_FLIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(sam) @@ -27,18 +16,19 @@ process PAIRTOOLS_FLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ flip \\ -c $chromsizes \\ - $options.args \\ + $args \\ -o ${prefix}.flip.gz \\ $sam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/parse/functions.nf b/modules/pairtools/parse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/parse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index cd6099e1..1d34d42c 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_PARSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(bam) @@ -28,19 +17,20 @@ process PAIRTOOLS_PARSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ parse \\ -c $chromsizes \\ - $options.args \\ + $args \\ --output-stats ${prefix}.pairsam.stat \\ -o ${prefix}.pairsam.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/restrict/functions.nf b/modules/pairtools/restrict/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/restrict/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index b1b21da7..9fcc245c 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_RESTRICT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(pairs) @@ -27,18 +16,19 @@ process PAIRTOOLS_RESTRICT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools \\ restrict \\ -f $frag \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ $pairs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/select/functions.nf b/modules/pairtools/select/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/select/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) 
- def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index dec29573..f699afa3 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SELECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_SELECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pairtools select \\ - "$options.args" \\ + "$args" \\ -o ${prefix}.selected.pairs.gz \\ --output-rest ${prefix}.unselected.pairs.gz \\ ${input} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/sort/functions.nf b/modules/pairtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map 
of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 996bcb0b..5caa5b74 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SORT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -26,20 +15,21 @@ process PAIRTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ pairtools \\ sort \\ - $options.args \\ + $args \\ --nproc $task.cpus \\ --memory "$mem" \\ -o ${prefix}.pairs.gz \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pangolin/functions.nf b/modules/pangolin/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pangolin/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index edf67dd7..99a68e09 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PANGOLIN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::pangolin=3.1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' - } else { - container 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' : + 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process PANGOLIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pangolin \\ $fasta\\ --outfile ${prefix}.pangolin.csv \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pangolin --version | sed "s/pangolin //g") + "${task.process}": + pangolin: \$(pangolin --version | sed "s/pangolin //g") END_VERSIONS """ } diff --git a/modules/paraclu/functions.nf b/modules/paraclu/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/paraclu/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf index 6d65a784..a2003834 100644 --- a/modules/paraclu/main.nf +++ b/modules/paraclu/main.nf @@ -1,22 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) +def VERSION = '10' // Version information not provided by tool on CLI process PARACLU { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::paraclu=10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1" - } else { - container "quay.io/biocontainers/paraclu:10--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1' : + 'quay.io/biocontainers/paraclu:10--h9a82719_1' }" input: tuple val(meta), path(bed) @@ -27,8 +18,8 @@ process PARACLU { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def VERSION=10 + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P @@ -38,8 +29,8 @@ process PARACLU { awk -F '\t' '{print \$1"\t"\$3"\t"\$4"\t"\$1":"\$3".."\$4","\$2"\t"\$6"\t"\$2}' ${prefix}.clustered.simplified > ${prefix}.clustered.simplified.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION + "${task.process}": + paraclu: $VERSION END_VERSIONS """ } diff --git a/modules/pbbam/pbmerge/functions.nf b/modules/pbbam/pbmerge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbbam/pbmerge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf index 63cd2ffe..970128cb 100644 --- a/modules/pbbam/pbmerge/main.nf +++ b/modules/pbbam/pbmerge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBBAM_PBMERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pbbam=1.7.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1" - } else { - container "quay.io/biocontainers/pbbam:1.7.0--h058f120_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1' : + 'quay.io/biocontainers/pbbam:1.7.0--h058f120_1' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process PBBAM_PBMERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pbmerge \\ -o ${prefix}.bam \\ - $options.args \\ + $args \\ *.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - pbbam/pbmerge: \$( pbmerge --version|sed 's/pbmerge //' ) + "${task.process}": + pbbam: \$( pbmerge --version|sed 's/pbmerge //' ) END_VERSIONS """ } diff --git a/modules/pbccs/functions.nf b/modules/pbccs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbccs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 55eacd76..83e56d96 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBCCS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pbccs=6.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0' : + 'quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0' }" input: tuple val(meta), path(bam), path(pbi) @@ -32,7 +21,8 @@ process PBCCS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ccs \\ $bam \\ @@ -42,11 +32,11 @@ process PBCCS { --metrics-json ${prefix}.chunk${chunk_num}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') + "${task.process}": + pbccs: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/peddy/functions.nf b/modules/peddy/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/peddy/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf index 4331ed9d..0a6c3384 100644 --- a/modules/peddy/main.nf +++ b/modules/peddy/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PEDDY { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::peddy=0.4.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0" - } else { - container "quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0' : + 'quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0' }" input: tuple val(meta), path(vcf), path(vcf_tbi) @@ -30,18 +19,19 @@ process PEDDY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ peddy \\ - $options.args \\ + $args \\ --plot \\ -p $task.cpus \\ $vcf \\ $ped cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) + "${task.process}": + peddy: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) END_VERSIONS """ } diff --git a/modules/phantompeakqualtools/functions.nf b/modules/phantompeakqualtools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/phantompeakqualtools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index b390bf7e..f2edabc3 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.2.2' +def VERSION = '1.2.2' // Version information not provided by tool on CLI process PHANTOMPEAKQUALTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::phantompeakqualtools=1.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0" - } else { - container "quay.io/biocontainers/phantompeakqualtools:1.2.2--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0' : + 'quay.io/biocontainers/phantompeakqualtools:1.2.2--0' }" input: tuple val(meta), path(bam) @@ -30,13 +19,15 @@ process PHANTOMPEAKQUALTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + phantompeakqualtools: $VERSION END_VERSIONS """ } diff --git a/modules/phyloflash/functions.nf b/modules/phyloflash/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/phyloflash/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf index 894c16a2..c507dd14 100644 --- a/modules/phyloflash/main.nf +++ b/modules/phyloflash/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PHYLOFLASH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::phyloflash=3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1" - } else { - container "quay.io/biocontainers/phyloflash:3.4--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1' : + 'quay.io/biocontainers/phyloflash:3.4--hdfd78af_1' }" input: tuple val(meta), path(reads) @@ -28,12 +17,12 @@ process PHYLOFLASH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ phyloFlash.pl \\ - $options.args \\ + $args \\ -read1 ${reads[0]} \\ -lib $prefix \\ -interleaved \\ @@ -44,14 +33,14 @@ process PHYLOFLASH { mv ${prefix}.* $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } else { """ phyloFlash.pl \\ - $options.args \\ + $args \\ -read1 ${reads[0]} \\ -read2 ${reads[1]} \\ -lib $prefix \\ @@ -62,24 +51,22 @@ process PHYLOFLASH { mv ${prefix}.* $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } stub: - - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ mkdir ${prefix} touch ${prefix}/${prefix}.SSU.collection.fasta touch ${prefix}/${prefix}.phyloFlash cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") END_VERSIONS """ } diff --git a/modules/picard/collecthsmetrics/functions.nf b/modules/picard/collecthsmetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collecthsmetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - 
def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 1f7ad8e6..adb82d8c 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTHSMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::picard=2.26.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.26.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.2--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -30,7 +19,8 @@ process PICARD_COLLECTHSMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? "-R $fasta" : "" def avail_mem = 3 @@ -43,7 +33,7 @@ process PICARD_COLLECTHSMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectHsMetrics \\ - $options.args \\ + $args \\ $reference \\ -BAIT_INTERVALS $bait_intervals \\ -TARGET_INTERVALS $target_intervals \\ @@ -51,8 +41,8 @@ process PICARD_COLLECTHSMETRICS { -OUTPUT ${prefix}_collecthsmetrics.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/collectmultiplemetrics/functions.nf b/modules/picard/collectmultiplemetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectmultiplemetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - 
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index dd8fdaca..f52f5885 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTMULTIPLEMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_COLLECTMULTIPLEMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectMultipleMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -39,14 +29,14 @@ process PICARD_COLLECTMULTIPLEMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectMultipleMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectMultipleMetrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/collectwgsmetrics/functions.nf b/modules/picard/collectwgsmetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectwgsmetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 6028feef..94745d2d 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -27,7 +16,8 @@ process PICARD_COLLECTWGSMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -38,14 +28,14 @@ process PICARD_COLLECTWGSMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectWgsMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/filtersamreads/functions.nf b/modules/picard/filtersamreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/filtersamreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 68cee34d..8b1d2e6b 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_FILTERSAMREADS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(readlist) @@ -27,7 +16,8 @@ process PICARD_FILTERSAMREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -42,11 +32,11 @@ process PICARD_FILTERSAMREADS { --INPUT $bam \\ --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else if ( filter == 'includeReadList' || filter == 'excludeReadList' ) { @@ -58,11 +48,11 @@ process PICARD_FILTERSAMREADS { --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ --READ_LIST_FILE $readlist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/markduplicates/functions.nf b/modules/picard/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map 
args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 130a1e52..d4c5886f 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MARKDUPLICATES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -39,14 +29,14 @@ process PICARD_MARKDUPLICATES { picard \\ -Xmx${avail_mem}g \\ MarkDuplicates \\ - $options.args \\ + $args \\ I=$bam \\ O=${prefix}.bam \\ M=${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/mergesamfiles/functions.nf b/modules/picard/mergesamfiles/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/mergesamfiles/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 355c0bf3..3a2fc620 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MERGESAMFILES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, 
options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -26,7 +15,8 @@ process PICARD_MERGESAMFILES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 if (!task.memory) { @@ -39,20 +29,20 @@ process PICARD_MERGESAMFILES { picard \\ -Xmx${avail_mem}g \\ MergeSamFiles \\ - $options.args \\ + $args \\ ${'INPUT='+bam_files.join(' INPUT=')} \\ OUTPUT=${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else { """ ln -s ${bam_files[0]} ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/sortsam/functions.nf 
b/modules/picard/sortsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/sortsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index 939df1c0..b264b927 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -1,23 +1,11 @@ - -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_SORTSAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +16,8 @@ process PICARD_SORTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -44,8 +33,8 @@ process PICARD_SORTSAM { --SORT_ORDER $sort_order cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/pirate/functions.nf b/modules/pirate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pirate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - 
options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 01a950dd..3bbb1d64 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PIRATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pirate=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1" - } else { - container "quay.io/biocontainers/pirate:1.0.4--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1' : + 'quay.io/biocontainers/pirate:1.0.4--hdfd78af_1' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process PIRATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ PIRATE \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --input ./ \\ --output results/ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) + "${task.process}": + pirate: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) END_VERSIONS """ } diff --git a/modules/plasmidid/functions.nf b/modules/plasmidid/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plasmidid/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 1edc5eeb..290ae549 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLASMIDID { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::plasmidid=1.6.5' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' - } else { - container 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' : + 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' }" input: tuple val(meta), path(scaffold) @@ -34,19 +23,20 @@ process PLASMIDID { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ plasmidID \\ -d $fasta \\ -s $prefix \\ -c $scaffold \\ - $options.args \\ + $args \\ -o . mv NO_GROUP/$prefix ./$prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plasmidID --version 2>&1)) + "${task.process}": + plasmidid: \$(echo \$(plasmidID --version 2>&1)) END_VERSIONS """ } diff --git a/modules/plink/extract/functions.nf b/modules/plink/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf index 2e18500a..34b12fca 100644 --- a/modules/plink/extract/main.nf +++ b/modules/plink/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK_EXTRACT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" - } else { - container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" input: tuple val(meta), path(bed), path(bim), path(fam), path(variants) @@ -28,20 +17,21 @@ process PLINK_EXTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" """ plink \\ --bfile ${meta.id} \\ - $options.args \\ + $args \\ --extract $variants \\ --threads $task.cpus \\ --make-bed \\ --out $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') + "${task.process}": + plink: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') END_VERSIONS """ } diff --git a/modules/plink/vcf/functions.nf b/modules/plink/vcf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink/vcf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} 
- -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index a676b723..b6fd03d7 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK_VCF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::plink=1.90b6.21" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" - } else { - container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" input: tuple val(meta), path(vcf) @@ -29,18 +18,19 @@ process PLINK_VCF { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ plink \\ --vcf ${vcf} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) + "${task.process}": + plink: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) END_VERSIONS """ } diff --git a/modules/plink2/vcf/functions.nf b/modules/plink2/vcf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink2/vcf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf index 869a5587..8101f7dd 100644 --- a/modules/plink2/vcf/main.nf +++ b/modules/plink2/vcf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK2_VCF { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1" - } else { - container "quay.io/biocontainers/plink2:2.00a2.3--h712d239_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1' : + 'quay.io/biocontainers/plink2:2.00a2.3--h712d239_1' }" input: tuple val(meta), path(vcf) @@ -28,16 +17,17 @@ process PLINK2_VCF { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ plink2 \\ - $options.args \\ + $args \\ --vcf $vcf \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + "${task.process}": + plink2: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) END_VERSIONS """ } diff --git a/modules/pmdtools/filter/functions.nf b/modules/pmdtools/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pmdtools/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf index 3e363a9c..301f9206 100644 --- a/modules/pmdtools/filter/main.nf +++ b/modules/pmdtools/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PMDTOOLS_FILTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pmdtools=0.60" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5" - } else { - container "quay.io/biocontainers/pmdtools:0.60--hdfd78af_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5' : + 'quay.io/biocontainers/pmdtools:0.60--hdfd78af_5' }" input: tuple val(meta), path(bam), path (bai) @@ -28,8 +17,11 @@ process PMDTOOLS_FILTER { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' def split_cpus = Math.floor(task.cpus/2) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" 
//threshold and header flags activate filtering function of pmdtools """ @@ -37,22 +29,22 @@ process PMDTOOLS_FILTER { calmd \\ $bam \\ $reference \\ - $options.args \\ + $args \\ -@ ${split_cpus} \\ | pmdtools \\ --threshold $threshold \\ --header \\ - $options.args2 \\ + $args2 \\ | samtools \\ view \\ - $options.args3 \\ + $args3 \\ -Sb \\ - \\ -@ ${split_cpus} \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": pmdtools: \$( pmdtools --version | cut -f2 -d ' ' | sed 's/v//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS diff --git a/modules/porechop/functions.nf b/modules/porechop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/porechop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf index cf564938..2edc5c78 100644 --- a/modules/porechop/main.nf +++ b/modules/porechop/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PORECHOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::porechop=0.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2" - } else { - container "quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2' : + 'quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*.fastq.gz") , emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("*.fastq.gz"), emit: reads + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ porechop \\ - -i ${reads} \\ - -t ${task.cpus} \\ - ${options.args} \\ + -i $reads \\ + -t $task.cpus \\ + $args \\ -o ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( porechop --version ) + "${task.process}": + porechop: \$( porechop --version ) END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/functions.nf b/modules/preseq/lcextrap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/preseq/lcextrap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and 
to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index f551a549..43f86cf8 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRESEQ_LCEXTRAP { tag "$meta.id" label 'process_medium' label 'error_ignore' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::preseq=3.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1" - } else { - container "quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1' : + 'quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1' }" input: tuple val(meta), path(bam) @@ -28,20 +17,21 @@ process PRESEQ_LCEXTRAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' """ preseq \\ lc_extrap \\ - $options.args \\ + $args \\ $paired_end \\ -output ${prefix}.ccurve.txt \\ $bam cp .command.err ${prefix}.command.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') + "${task.process}": + preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') END_VERSIONS """ } diff --git a/modules/prodigal/functions.nf b/modules/prodigal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prodigal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index 572ffe92..b09da13c 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRODIGAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::prodigal=2.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2" - } else { - container "quay.io/biocontainers/prodigal:2.6.3--h516909a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2' : + 'quay.io/biocontainers/prodigal:2.6.3--h516909a_2' }" input: tuple val(meta), path(genome) @@ -30,10 +19,11 @@ process PRODIGAL { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ prodigal -i "${genome}" \\ - $options.args \\ + $args \\ -f $output_format \\ -d "${prefix}.fna" \\ -o "${prefix}.${output_format}" \\ @@ -41,8 +31,8 @@ process PRODIGAL { -s "${prefix}_all.txt" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') + "${task.process}": + prodigal: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') END_VERSIONS """ } diff --git a/modules/prokka/functions.nf b/modules/prokka/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prokka/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index fb86078c..8fae6367 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PROKKA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::prokka=1.14.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0" - } else { - container "quay.io/biocontainers/prokka:1.14.6--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0' : + 'quay.io/biocontainers/prokka:1.14.6--pl526_0' }" input: tuple val(meta), path(fasta) @@ -38,12 +28,13 @@ process PROKKA { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? 
"--prodigaltf ${prodigal_tf[0]}" : "" """ prokka \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --prefix $prefix \\ $proteins_opt \\ @@ -51,8 +42,8 @@ process PROKKA { $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') + "${task.process}": + prokka: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') END_VERSIONS """ } diff --git a/modules/pycoqc/functions.nf b/modules/pycoqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pycoqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pycoqc/main.nf b/modules/pycoqc/main.nf index 2c263d61..e966b31c 100644 --- a/modules/pycoqc/main.nf +++ b/modules/pycoqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYCOQC { tag "$summary" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::pycoqc=2.5.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0" - } else { - container "quay.io/biocontainers/pycoqc:2.5.2--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0' : + 'quay.io/biocontainers/pycoqc:2.5.2--py_0' }" input: path summary @@ -27,16 +16,17 @@ process PYCOQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pycoQC \\ - $options.args \\ + $args \\ -f $summary \\ -o pycoqc.html \\ -j pycoqc.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') + "${task.process}": + pycoqc: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/pydamage/analyze/functions.nf b/modules/pydamage/analyze/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/analyze/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 9cfb8a1a..c55616db 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_ANALYZE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,17 +15,18 @@ process PYDAMAGE_ANALYZE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ pydamage \\ analyze \\ - $options.args \\ + $args \\ -p $task.cpus \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/pydamage/filter/functions.nf b/modules/pydamage/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 6cd7ae7a..2e0afac9 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(csv) @@ -26,17 +15,18 @@ process PYDAMAGE_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ pydamage \\ filter \\ - $options.args \\ + $args \\ $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/qcat/functions.nf b/modules/qcat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qcat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index b650fb8c..9f53f0cb 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qcat=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0" - } else { - container "quay.io/biocontainers/qcat:1.1.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0' : + 'quay.io/biocontainers/qcat:1.1.0--py_0' }" input: tuple val(meta), path(reads) @@ -27,7 +16,8 @@ process QCAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ ## Unzip fastq file ## qcat doesn't support zipped files yet @@ -47,8 +37,8 @@ process QCAT { gzip fastq/* cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') + "${task.process}": + qcat: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/bamqc/functions.nf b/modules/qualimap/bamqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/bamqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index d33f1e67..a47fde7e 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_BAMQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process QUALIMAP_BAMQC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def collect_pairs = meta.single_end ? '' : '--collect-overlap-pairs' def memory = task.memory.toGiga() + "G" @@ -47,7 +37,7 @@ process QUALIMAP_BAMQC { qualimap \\ --java-mem-size=$memory \\ bamqc \\ - $options.args \\ + $args \\ -bam $bam \\ $regions \\ -p $strandedness \\ @@ -56,8 +46,8 @@ process QUALIMAP_BAMQC { -nt $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/rnaseq/functions.nf b/modules/qualimap/rnaseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/rnaseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def 
getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 9492cec6..459f3da5 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_RNASEQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process QUALIMAP_RNASEQ { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' def memory = task.memory.toGiga() + "G" @@ -44,7 +34,7 @@ process QUALIMAP_RNASEQ { qualimap \\ --java-mem-size=$memory \\ rnaseq \\ - $options.args \\ + $args \\ -bam $bam \\ -gtf $gtf \\ -p $strandedness \\ @@ -52,8 +42,8 @@ process QUALIMAP_RNASEQ { -outdir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/quast/functions.nf b/modules/quast/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/quast/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 072d649d..43caca3d 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUAST { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::quast=5.0.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' - } else { - container 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' : + 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' }" input: path consensus @@ -30,7 +19,8 @@ process QUAST { path "versions.yml" , emit: versions script: - prefix = options.suffix ?: software + def args = task.ext.args ?: '' + prefix = task.ext.suffix ?: 'quast' def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? "-r $fasta" : '' """ @@ -39,12 +29,14 @@ process QUAST { $reference \\ $features \\ --threads $task.cpus \\ - $options.args \\ + $args \\ ${consensus.join(' ')} + ln -s ${prefix}/report.tsv + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') + "${task.process}": + quast: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/racon/functions.nf b/modules/racon/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/racon/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/racon/main.nf b/modules/racon/main.nf index 60a5061e..5936fac0 100644 --- a/modules/racon/main.nf +++ b/modules/racon/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RACON { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::racon=1.4.20" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1" - } else { - container "quay.io/biocontainers/racon:1.4.20--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1' : + 'quay.io/biocontainers/racon:1.4.20--h9a82719_1' }" input: tuple val(meta), path(reads), path(assembly), path(paf) @@ -26,20 +15,21 @@ process RACON { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - racon -t "${task.cpus}" \\ + racon -t "$task.cpus" \\ "${reads}" \\ "${paf}" \\ - $options.args \\ + $args \\ "${assembly}" > \\ ${prefix}_assembly_consensus.fasta gzip -n ${prefix}_assembly_consensus.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( racon --version 2>&1 | sed 's/^.*v//' ) + "${task.process}": + racon: \$( racon --version 2>&1 | sed 's/^.*v//' ) END_VERSIONS """ } diff --git a/modules/rapidnj/functions.nf b/modules/rapidnj/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rapidnj/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) 
{ - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index aa23b56e..04a08227 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.3.2' // No version information printed +def VERSION = '2.3.2' // Version information not provided by tool on CLI process RAPIDNJ { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::rapidnj=2.3.2 conda-forge::biopython=1.78" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } else { - container "quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' : + 'quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' }" input: path alignment @@ -28,20 +17,21 @@ process RAPIDNJ { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ python \\ -c 'from Bio import SeqIO; SeqIO.convert("$alignment", "fasta", "alignment.sth", "stockholm")' rapidnj \\ alignment.sth \\ - $options.args \\ + $args \\ -i sth \\ -c $task.cpus \\ -x rapidnj_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + rapidnj: $VERSION biopython: \$(python -c "import Bio; print(Bio.__version__)") END_VERSIONS """ diff --git a/modules/rasusa/functions.nf b/modules/rasusa/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rasusa/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index b9ba0b13..b43792ee 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RASUSA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rasusa=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1" - } else { - container "quay.io/biocontainers/rasusa:0.3.0--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1' : + 'quay.io/biocontainers/rasusa:0.3.0--h779adbc_1' }" input: tuple val(meta), path(reads), val(genome_size) @@ -27,18 +16,19 @@ process RASUSA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def output = meta.single_end ? 
"--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ rasusa \\ - $options.args \\ + $args \\ --coverage $depth_cutoff \\ --genome-size $genome_size \\ --input $reads \\ $output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") + "${task.process}": + rasusa: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") END_VERSIONS """ } diff --git a/modules/raxmlng/functions.nf b/modules/raxmlng/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/raxmlng/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index f607b506..62b6c78a 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RAXMLNG { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::raxml-ng=1.0.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0" - } else { - container "quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0' : + 'quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0' }" input: path alignment @@ -26,16 +15,17 @@ process RAXMLNG { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ raxml-ng \\ - $options.args \\ + $args \\ --msa $alignment \\ --threads $task.cpus \\ --prefix output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. //; s/released.*\$//') + "${task.process}": + raxmlng: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. 
//; s/released.*\$//') END_VERSIONS """ } diff --git a/modules/rmarkdownnotebook/functions.nf b/modules/rmarkdownnotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rmarkdownnotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf index 4bded58c..9a7db505 100644 --- a/modules/rmarkdownnotebook/main.nf +++ b/modules/rmarkdownnotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' -include { dump_params_yml; indent_code_block } from "./parametrize" - -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true +include { dump_params_yml; indent_code_block } from "./parametrize" process RMARKDOWNNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //yaml and rmarkdown R packages. conda (params.enable_conda ? 
"r-base=4.1.0 r-rmarkdown=2.9 r-yaml=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } else { - container "quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' : + 'quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' }" input: tuple val(meta), path(notebook) @@ -37,7 +24,11 @@ process RMARKDOWNNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -45,14 +36,14 @@ process RMARKDOWNNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -73,9 +64,9 @@ process RMARKDOWNNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" # Work around https://github.com/rstudio/rmarkdown/issues/1508 # If the symbolic link is not replaced by a physical file @@ -90,7 +81,7 @@ process RMARKDOWNNOTEBOOK { EOF cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": rmarkdown: \$(Rscript -e "cat(paste(packageVersion('rmarkdown'), collapse='.'))") END_VERSIONS """ diff --git a/modules/roary/functions.nf b/modules/roary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/roary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/roary/main.nf b/modules/roary/main.nf index 9dc948fb..a05973eb 100644 --- a/modules/roary/main.nf +++ b/modules/roary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ROARY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::roary=3.13.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0" - } else { - container "quay.io/biocontainers/roary:3.13.0--pl526h516909a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0' : + 'quay.io/biocontainers/roary:3.13.0--pl526h516909a_0' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process ROARY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ roary \\ - $options.args \\ + $args \\ -p $task.cpus \\ -f results/ \\ $gff cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( roary --version ) + "${task.process}": + roary: \$( roary --version ) END_VERSIONS """ } diff --git a/modules/rsem/calculateexpression/functions.nf b/modules/rsem/calculateexpression/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/calculateexpression/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not 
publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index f19392f7..659082fa 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_CALCULATEEXPRESSION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: tuple val(meta), path(reads) @@ -34,7 +23,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -50,14 +40,14 @@ process RSEM_CALCULATEEXPRESSION { --temporary-folder ./tmp/ \\ $strandedness \\ $paired_end \\ - $options.args \\ + $args \\ $reads \\ \$INDEX \\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rsem/preparereference/functions.nf b/modules/rsem/preparereference/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/preparereference/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 7e671207..95597b74 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_PREPAREREFERENCE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: path fasta, stageAs: "rsem/*" @@ -28,9 +17,11 @@ process RSEM_PREPAREREFERENCE { path "versions.yml" , emit: versions script: - def args = options.args.tokenize() - if (args.contains('--star')) { - args.removeIf { it.contains('--star') } + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args_list = args.tokenize() + if (args_list.contains('--star')) { + args_list.removeIf { it.contains('--star') } def memory = task.memory ? 
"--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' """ STAR \\ @@ -40,18 +31,18 @@ process RSEM_PREPAREREFERENCE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args2 + $args2 rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $fasta \\ rsem/genome cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ @@ -60,13 +51,13 @@ process RSEM_PREPAREREFERENCE { rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - $options.args \\ + $args \\ $fasta \\ rsem/genome cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rseqc/bamstat/functions.nf b/modules/rseqc/bamstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/bamstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index 64939add..d9d3fa36 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_BAMSTAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process RSEQC_BAMSTAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ bam_stat.py \\ -i $bam \\ - $options.args \\ + $args \\ > ${prefix}.bam_stat.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") + "${task.process}": + rseqc: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/inferexperiment/functions.nf b/modules/rseqc/inferexperiment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/inferexperiment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = 
[ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index c5e94943..3b879cfb 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INFEREXPERIMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process RSEQC_INFEREXPERIMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ infer_experiment.py \\ -i $bam \\ -r $bed \\ - $options.args \\ + $args \\ > ${prefix}.infer_experiment.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") + "${task.process}": + rseqc: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/innerdistance/functions.nf b/modules/rseqc/innerdistance/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/innerdistance/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy 
Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 622cd5cd..88bec499 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INNERDISTANCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -31,27 +20,28 @@ process RSEQC_INNERDISTANCE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (!meta.single_end) { """ inner_distance.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ > stdout.txt head -n 2 stdout.txt > ${prefix}.inner_distance_mean.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } else { """ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionannotation/functions.nf b/modules/rseqc/junctionannotation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionannotation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list 
to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index 1b75d915..b6949641 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONANNOTATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -33,18 +22,19 @@ process RSEQC_JUNCTIONANNOTATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ junction_annotation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ 2> ${prefix}.junction_annotation.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") + "${task.process}": + rseqc: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionsaturation/functions.nf b/modules/rseqc/junctionsaturation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionsaturation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") 
} // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index fa435aea..58451d2e 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONSATURATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -28,17 +17,18 @@ process RSEQC_JUNCTIONSATURATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ junction_saturation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") + "${task.process}": + rseqc: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readdistribution/functions.nf b/modules/rseqc/readdistribution/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readdistribution/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 0c83fdf0..74af618d 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDISTRIBUTION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process RSEQC_READDISTRIBUTION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ read_distribution.py \\ -i $bam \\ @@ -35,8 +25,8 @@ process RSEQC_READDISTRIBUTION { > ${prefix}.read_distribution.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") + "${task.process}": + rseqc: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readduplication/functions.nf b/modules/rseqc/readduplication/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readduplication/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index bee82682..80fcb150 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDUPLICATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -29,16 +18,17 @@ process RSEQC_READDUPLICATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ read_duplication.py \\ -i $bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") + "${task.process}": + rseqc: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") END_VERSIONS """ } diff --git a/modules/salmon/index/functions.nf b/modules/salmon/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index c3fcef01..b0a2f973 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_INDEX { tag "$transcript_fasta" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: path genome_fasta @@ -27,6 +16,7 @@ process SALMON_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def get_decoy_ids = "grep '^>' $genome_fasta | cut -d ' ' -f 1 > decoys.txt" def gentrome = "gentrome.fa" if (genome_fasta.endsWith('.gz')) { @@ -43,11 +33,11 @@ process SALMON_INDEX { --threads $task.cpus \\ -t $gentrome \\ -d decoys.txt \\ - $options.args \\ + $args \\ -i salmon cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/salmon/quant/functions.nf b/modules/salmon/quant/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/quant/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 7c2e0e17..9557fd24 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_QUANT { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: tuple val(meta), path(reads) @@ -31,7 +20,8 @@ process SALMON_QUANT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = "--index $index" def input_reads = meta.single_end ? 
"-r $reads" : "-1 ${reads[0]} -2 ${reads[1]}" @@ -68,12 +58,12 @@ process SALMON_QUANT { --libType=$strandedness \\ $reference \\ $input_reads \\ - $options.args \\ + $args \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/samblaster/functions.nf b/modules/samblaster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samblaster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf index 4481d8cd..8445b0d0 100644 --- a/modules/samblaster/main.nf +++ b/modules/samblaster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMBLASTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" - } else { - container "quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' : + 'quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' }" input: tuple val(meta), path(bam) @@ -26,16 +15,19 @@ process SAMBLASTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" """ - samtools view -h $options.args2 $bam | \\ - samblaster $options.args | \\ - samtools view $options.args3 -Sb - >${prefix}.bam + samtools view -h $args2 $bam | \\ + samblaster $args | \\ + samtools view $args3 -Sb - >${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) + "${task.process}": + samblaster: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/samtools/ampliconclip/functions.nf b/modules/samtools/ampliconclip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/ampliconclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 4cf98d3f..87d6ff8b 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_AMPLICONCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -31,13 +20,14 @@ process SAMTOOLS_AMPLICONCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? "-f ${prefix}.clipstats.txt" : "" """ samtools \\ ampliconclip \\ - $options.args \\ + $args \\ $rejects \\ $stats \\ -b $bed \\ @@ -45,8 +35,8 @@ process SAMTOOLS_AMPLICONCLIP { $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/bam2fq/functions.nf b/modules/samtools/bam2fq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/bam2fq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 48e3249c..20e83a14 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_BAM2FQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(inputbam) @@ -27,13 +16,14 @@ process SAMTOOLS_BAM2FQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (split){ """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ -1 ${prefix}_1.fq.gz \\ -2 ${prefix}_2.fq.gz \\ @@ -42,21 +32,21 @@ process SAMTOOLS_BAM2FQ { $inputbam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ $inputbam >${prefix}_interleaved.fq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/depth/functions.nf b/modules/samtools/depth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/depth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// 
Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index 9c46b011..f336547f 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_DEPTH { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,18 +15,19 @@ process SAMTOOLS_DEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ samtools \\ depth \\ - $options.args \\ + $args \\ -o ${prefix}.tsv \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/faidx/functions.nf b/modules/samtools/faidx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/faidx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 80708084..c53373a9 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FAIDX { tag "$fasta" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: path fasta @@ -26,11 +15,12 @@ process SAMTOOLS_FAIDX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ samtools faidx $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fastq/functions.nf b/modules/samtools/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index fb7e3904..bdbf53e4 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,18 +15,19 @@ process SAMTOOLS_FASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def endedness = meta.single_end ? 
"-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" """ samtools fastq \\ - $options.args \\ + $args \\ --threads ${task.cpus-1} \\ $endedness \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fixmate/functions.nf b/modules/samtools/fixmate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fixmate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index af1cf829..180833f4 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FIXMATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,20 +15,21 @@ process SAMTOOLS_FIXMATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" """ samtools \\ fixmate \\ - $options.args \\ + $args \\ --threads ${task.cpus-1} \\ $bam \\ ${prefix}.bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/flagstat/functions.nf b/modules/samtools/flagstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/flagstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index 072a135f..03721d0b 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FLAGSTAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_FLAGSTAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools flagstat --threads ${task.cpus-1} $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/idxstats/functions.nf b/modules/samtools/idxstats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/idxstats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - 
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index fa0e7dc3..cd068679 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_IDXSTATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } 
conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_IDXSTATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools idxstats $bam > ${bam}.idxstats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/index/functions.nf b/modules/samtools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index d66e4513..b033e225 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input) @@ -28,12 +17,13 @@ process SAMTOOLS_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - samtools index -@ ${task.cpus-1} $options.args $input + samtools index -@ ${task.cpus-1} $args $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/merge/functions.nf b/modules/samtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove 
empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index ab641bb9..8eeb64a2 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } 
conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input_files) @@ -28,15 +17,16 @@ process SAMTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def file_type = input_files[0].getExtension() def reference = fasta ? "--reference ${fasta}" : "" """ - samtools merge --threads ${task.cpus-1} $options.args ${reference} ${prefix}.${file_type} $input_files + samtools merge --threads ${task.cpus-1} $args ${reference} ${prefix}.${file_type} $input_files cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/mpileup/functions.nf b/modules/samtools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from 
process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 081682ed..5f6e2d49 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process SAMTOOLS_MPILEUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ samtools mpileup \\ --fasta-ref $fasta \\ --output ${prefix}.mpileup \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/sort/functions.nf b/modules/samtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index f980b472..623f10b6 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,12 +15,13 @@ process SAMTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam + samtools sort $args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/stats/functions.nf b/modules/samtools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy 
Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index e0a2b50d..83c87002 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_STATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input), path(input_index) @@ -27,13 +16,14 @@ process SAMTOOLS_STATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def reference = fasta ? 
"--reference ${fasta}" : "" """ samtools stats --threads ${task.cpus-1} ${reference} ${input} > ${input}.stats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/view/functions.nf b/modules/samtools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list 
= [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index e5ff5546..464edd09 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input) @@ -28,15 +17,16 @@ process SAMTOOLS_VIEW { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ - samtools view --threads ${task.cpus-1} ${reference} $options.args $input > ${prefix}.${file_type} + samtools view --threads ${task.cpus-1} ${reference} $args $input > ${prefix}.${file_type} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/scoary/functions.nf b/modules/scoary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/scoary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - 
return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf index 5720b4e5..8fed0119 100644 --- a/modules/scoary/main.nf +++ b/modules/scoary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SCOARY { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::scoary=1.6.16" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2" - } else { - container "quay.io/biocontainers/scoary:1.6.16--py_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2' : + 'quay.io/biocontainers/scoary:1.6.16--py_2' }" input: tuple val(meta), path(genes), path(traits) @@ -27,19 +16,20 @@ process SCOARY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def newick_tree = tree ? 
"-n ${tree}" : "" """ scoary \\ - $options.args \\ + $args \\ --no-time \\ --threads $task.cpus \\ --traits $traits \\ --genes $genes cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( scoary --version 2>&1 ) + "${task.process}": + scoary: \$( scoary --version 2>&1 ) END_VERSIONS """ } diff --git a/modules/seacr/callpeak/functions.nf b/modules/seacr/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seacr/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish 
versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 97bf1c0b..328e4e6c 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.3' +def VERSION = '1.3' // Version information not provided by tool on CLI process SEACR_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::seacr=1.3 conda-forge::r-base=4.0.2 bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0" - } else { - container 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' : + 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' }" input: tuple val(meta), path(bedgraph), path(ctrlbedgraph) @@ -29,17 +18,19 @@ process SEACR_CALLPEAK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def function_switch = ctrlbedgraph ? 
"$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ $bedgraph \\ $function_switch \\ - $options.args \\ + $args \\ $prefix + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seacr: $VERSION bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') END_VERSIONS diff --git a/modules/seqkit/split2/functions.nf b/modules/seqkit/split2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqkit/split2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 80f55bb6..fc027793 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::seqkit=0.16.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0" - } else { - container "quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0' : + 'quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0' }" input: tuple val(meta), path(reads) @@ -26,34 +15,35 @@ process SEQKIT_SPLIT2 { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if(meta.single_end){ """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -1 $reads \\ --out-dir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -1 ${reads[0]} \\ -2 ${reads[1]} \\ --out-dir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqsero2/functions.nf b/modules/seqsero2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqsero2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// 
-def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf index 3748a6e4..a8dd731e 100644 --- a/modules/seqsero2/main.nf +++ b/modules/seqsero2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQSERO2 { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqsero2=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0" - } else { - container "quay.io/biocontainers/seqsero2:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0' : + 'quay.io/biocontainers/seqsero2:1.2.1--py_0' }" input: tuple val(meta), path(seqs) @@ -28,18 +17,19 @@ process SEQSERO2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ SeqSero2_package.py \\ - $options.args \\ + $args \\ -d results/ \\ -n $prefix \\ -p $task.cpus \\ -i $seqs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) + "${task.process}": + seqsero2: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) END_VERSIONS """ } diff --git a/modules/seqtk/mergepe/functions.nf b/modules/seqtk/mergepe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/mergepe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map 
args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf index fb8eb382..954bed5c 100644 --- a/modules/seqtk/mergepe/main.nf +++ b/modules/seqtk/mergepe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_MERGEPE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -26,27 +15,28 @@ process SEQTK_MERGEPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ ln -s ${reads} ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqtk \\ mergepe \\ - $options.args \\ + $args \\ ${reads} \\ | gzip -n >> ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/sample/functions.nf b/modules/seqtk/sample/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/sample/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} 
- -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 3b039fb9..83a107d0 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SAMPLE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -27,43 +16,44 @@ process SEQTK_SAMPLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ seqtk \\ sample \\ - $options.args \\ + $args \\ $reads \\ $sample_size \\ | gzip --no-name > ${prefix}.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { - if (!(options.args ==~ /.*-s[0-9]+.*/)) { - options.args = options.args + " -s100" + if (!(args ==~ /.*-s[0-9]+.*/)) { + args += " -s100" } """ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[0]} \\ $sample_size \\ | gzip --no-name > ${prefix}_1.fastq.gz \\ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[1]} \\ $sample_size \\ | gzip --no-name > ${prefix}_2.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/subseq/functions.nf b/modules/seqtk/subseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/subseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - 
options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index df8783de..1d93b061 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SUBSEQ { tag '$sequences' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: path sequences @@ -27,7 +16,8 @@ process SEQTK_SUBSEQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ?: '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { ext = "fq" @@ -35,14 +25,14 @@ process SEQTK_SUBSEQ { """ seqtk \\ subseq \\ - $options.args \\ + $args \\ $sequences \\ $filter_list | \\ gzip --no-name > ${sequences}${prefix}.${ext}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/bam2seqz/functions.nf b/modules/sequenzautils/bam2seqz/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/bam2seqz/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return 
options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 61ca70c6..9082d426 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_BAM2SEQZ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(normalbam), path(tumourbam) @@ -28,11 +17,12 @@ process SEQUENZAUTILS_BAM2SEQZ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ sequenza-utils \\ bam2seqz \\ - $options.args \\ + $args \\ -n $normalbam \\ -t $tumourbam \\ --fasta $fasta \\ @@ -40,8 +30,8 @@ process SEQUENZAUTILS_BAM2SEQZ { -o ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/gcwiggle/functions.nf b/modules/sequenzautils/gcwiggle/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/gcwiggle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index c952bb70..43358c43 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_GCWIGGLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) 
} conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process SEQUENZAUTILS_GCWIGGLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ sequenza-utils \\ gc_wiggle \\ - $options.args \\ + $args \\ --fasta $fasta \\ -o ${prefix}.wig.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqwish/induce/functions.nf b/modules/seqwish/induce/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqwish/induce/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index aaabce51..fb25a96e 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,24 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.7.1' +def VERSION = '0.7.1' // Version information not provided by tool on CLI process SEQWISH_INDUCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0" - } else { - container "quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0' : + 'quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0' }" input: tuple val(meta), path(paf), path(fasta) @@ -27,20 +17,20 @@ process SEQWISH_INDUCE { tuple val(meta), path("*.gfa"), emit: gfa path "versions.yml" , emit: versions - script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ seqwish \\ --threads $task.cpus \\ --paf-alns=$paf \\ --seqs=$fasta \\ --gfa=${prefix}.gfa \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seqwish: $VERSION END_VERSIONS """ } diff --git a/modules/shovill/functions.nf b/modules/shovill/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/shovill/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index 48425f9f..1a56df27 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SHOVILL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::shovill=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/shovill:1.1.0--0" - } else { - container "quay.io/biocontainers/shovill:1.1.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/shovill:1.1.0--0' : + 'quay.io/biocontainers/shovill:1.1.0--0' }" input: tuple val(meta), path(reads) @@ -30,20 +19,21 @@ process SHOVILL { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ shovill \\ --R1 ${reads[0]} \\ --R2 ${reads[1]} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') + "${task.process}": + shovill: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') END_VERSIONS """ } diff --git a/modules/snpdists/functions.nf b/modules/snpdists/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpdists/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index 506a922a..de79e89b 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPDISTS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::snp-dists=0.8.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0" - } else { - container "quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0' : + 'quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0' }" input: tuple val(meta), path(alignment) @@ -26,15 +15,16 @@ process SNPDISTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ snp-dists \\ - $options.args \\ + $args \\ $alignment > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') + "${task.process}": + snpdists: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') END_VERSIONS """ } diff --git a/modules/snpeff/functions.nf b/modules/snpeff/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpeff/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish 
versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 3a1f6a52..2cd023f6 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,26 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.snpeff_tag = "" - process SNPEFF { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::snpeff=5.0" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/snpeff:5.0--hdfd78af_1" - } + if (task.ext.use_cache) { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" } else { - container "nfcore/snpeff:${params.snpeff_tag}" + container "nfcore/snpeff:${task.ext.snpeff_tag}" } input: @@ -34,26 +21,28 @@ process SNPEFF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[snpEff] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - cache = params.use_cache ? "-dataDir \${PWD}/${snpeff_cache}" : "" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def dir_cache = task.ext.use_cache ? 
"-dataDir \${PWD}/${cache}" : "" """ - snpEff -Xmx${avail_mem}g \\ + snpEff \\ + -Xmx${avail_mem}g \\ $db \\ - $options.args \\ + $args \\ -csvStats ${prefix}.csv \\ - $cache \\ + $dir_cache \\ $vcf \\ > ${prefix}.ann.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') + "${task.process}": + snpeff: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') END_VERSIONS """ } diff --git a/modules/snpsites/functions.nf b/modules/snpsites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpsites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions 
= initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpsites/main.nf b/modules/snpsites/main.nf index 543ee01c..60e694ac 100644 --- a/modules/snpsites/main.nf +++ b/modules/snpsites/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPSITES { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::snp-sites=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0" - } else { - container "quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0' : + 'quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0' }" input: path alignment @@ -27,10 +16,11 @@ process SNPSITES { env CONSTANT_SITES, emit: constant_sites_string script: + def args = task.ext.args ?: '' """ snp-sites \\ $alignment \\ - $options.args \\ + $args \\ > filtered_alignment.fas echo \$(snp-sites -C $alignment) > constant.sites.txt @@ -38,8 +28,8 @@ process SNPSITES { CONSTANT_SITES=\$(cat constant.sites.txt) cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') + "${task.process}": + snpsites: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') END_VERSIONS """ } diff --git a/modules/sortmerna/functions.nf b/modules/sortmerna/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sortmerna/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 
?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 9602bb53..83cd8092 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SORTMERNA { tag "$meta.id" label "process_high" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sortmerna=4.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0' : + 'quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0' }" input: tuple val(meta), path(reads) @@ -28,7 +17,8 @@ process SORTMERNA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ sortmerna \\ @@ -38,14 +28,14 @@ process SORTMERNA { --workdir . \\ --aligned rRNA_reads \\ --other non_rRNA_reads \\ - $options.args + $args mv non_rRNA_reads.fq.gz ${prefix}.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } else { @@ -60,15 +50,15 @@ process SORTMERNA { --other non_rRNA_reads \\ --paired_in \\ --out2 \\ - $options.args + $args mv non_rRNA_reads_fwd.fq.gz ${prefix}_1.fastq.gz mv non_rRNA_reads_rev.fq.gz ${prefix}_2.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } diff --git a/modules/spades/functions.nf b/modules/spades/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spades/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { 
- def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spades/main.nf b/modules/spades/main.nf index 836efbda..4663ec55 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPADES { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::spades=3.15.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0" - } else { - container "quay.io/biocontainers/spades:3.15.3--h95f258a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0' : + 'quay.io/biocontainers/spades:3.15.3--h95f258a_0' }" input: tuple val(meta), path(illumina), path(pacbio), path(nanopore) @@ -32,7 +21,8 @@ process SPADES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def maxmem = task.memory.toGiga() def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" @@ -40,7 +30,7 @@ process SPADES { def custom_hmms = hmm ? "--custom-hmms $hmm" : "" """ spades.py \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --memory $maxmem \\ $custom_hmms \\ @@ -73,8 +63,8 @@ process SPADES { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') + "${task.process}": + spades: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/spatyper/functions.nf b/modules/spatyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spatyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths 
= path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index 34207dbf..d7c75ba6 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPATYPER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), 
meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::spatyper=0.3.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3" - } else { - container "quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3' : + 'quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3' }" input: tuple val(meta), path(fasta) @@ -28,18 +17,19 @@ process SPATYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" """ spaTyper \\ - $options.args \\ + $args \\ $input_args \\ --fasta $fasta \\ --output ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) + "${task.process}": + spatyper: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) END_VERSIONS """ } diff --git a/modules/sratools/fasterqdump/functions.nf b/modules/sratools/fasterqdump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/fasterqdump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/fasterqdump/main.nf b/modules/sratools/fasterqdump/main.nf index 08ef9045..73e3673d 100644 --- a/modules/sratools/fasterqdump/main.nf +++ b/modules/sratools/fasterqdump/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_FASTERQDUMP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : + 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" input: tuple val(meta), path(sra) @@ -26,6 +15,8 @@ process SRATOOLS_FASTERQDUMP { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" // Paired-end data extracted by fasterq-dump (--split-3 the default) always creates // *_1.fastq *_2.fastq files but sometimes also an additional *.fastq file @@ -39,19 +30,19 @@ process SRATOOLS_FASTERQDUMP { fi fasterq-dump \\ - ${options.args} \\ + $args \\ --threads $task.cpus \\ ${sra.name} pigz \\ - ${options.args2} \\ + $args2 \\ --no-name \\ --processes $task.cpus \\ *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/sratools/prefetch/functions.nf b/modules/sratools/prefetch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/prefetch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/prefetch/main.nf b/modules/sratools/prefetch/main.nf index 207d1e10..1e1eb802 100644 --- a/modules/sratools/prefetch/main.nf +++ b/modules/sratools/prefetch/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_PREFETCH { tag "$id" label 'process_low' label 'error_retry' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' - } else { - container 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' : + 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' }" input: tuple val(meta), val(id) @@ -27,6 +16,7 @@ process SRATOOLS_PREFETCH { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" """ eval "\$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" @@ -36,15 +26,15 @@ process SRATOOLS_PREFETCH { fi prefetch \\ - $options.args \\ + $args \\ --progress \\ $id vdb-validate $id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') END_VERSIONS """ } diff --git a/modules/staphopiasccmec/functions.nf b/modules/staphopiasccmec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/staphopiasccmec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and 
join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index 08def401..f33634ae 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAPHOPIASCCMEC { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::staphopia-sccmec=1.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0' : + 'quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,13 +15,14 @@ process STAPHOPIASCCMEC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv + staphopia-sccmec --assembly $fasta $args > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') + "${task.process}": + staphopiasccmec: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') END_VERSIONS """ } diff --git a/modules/star/align/functions.nf b/modules/star/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index e0ccba8c..46023d3e 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -1,28 +1,20 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? 
'bioconda::star=2.7.9a' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' - } else { - container 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' : + 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' }" input: tuple val(meta), path(reads) path index path gtf + val star_ignore_sjdbgtf + val seq_platform + val seq_center output: tuple val(meta), path('*d.out.bam') , emit: bam @@ -39,12 +31,13 @@ process STAR_ALIGN { tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" - def seq_platform = params.seq_platform ? "'PL:$params.seq_platform'" : "" - def seq_center = params.seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$params.seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " - def out_sam_type = (options.args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' - def mv_unsorted_bam = (options.args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def ignore_gtf = star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" + def seq_platform = seq_platform ? "'PL:$seq_platform'" : "" + def seq_center = seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " + def out_sam_type = (args.contains('--outSAMtype')) ? 
'' : '--outSAMtype BAM Unsorted' + def mv_unsorted_bam = (args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' """ STAR \\ --genomeDir $index \\ @@ -54,7 +47,7 @@ process STAR_ALIGN { $out_sam_type \\ $ignore_gtf \\ $seq_center \\ - $options.args + $args $mv_unsorted_bam @@ -68,8 +61,8 @@ process STAR_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } diff --git a/modules/star/genomegenerate/functions.nf b/modules/star/genomegenerate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/genomegenerate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index c932fafe..ad32c0dd 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_GENOMEGENERATE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS 
iGenomes. conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.13 conda-forge::gawk=5.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } else { - container "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' : + 'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' }" input: path fasta @@ -28,9 +17,10 @@ process STAR_GENOMEGENERATE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args_list = args.tokenize() def memory = task.memory ? 
"--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' - def args = options.args.tokenize() - if (args.contains('--genomeSAindexNbases')) { + if (args_list.contains('--genomeSAindexNbases')) { """ mkdir star STAR \\ @@ -40,11 +30,11 @@ process STAR_GENOMEGENERATE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS @@ -63,11 +53,11 @@ process STAR_GENOMEGENERATE { --runThreadN $task.cpus \\ --genomeSAindexNbases \$NUM_BASES \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS diff --git a/modules/strelka/germline/functions.nf b/modules/strelka/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy 
Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 5e913c40..e991db67 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--0" - } else { - container "quay.io/biocontainers/strelka:2.9.10--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--0' : + 'quay.io/biocontainers/strelka:2.9.10--0' }" input: tuple val(meta), path(input), path(input_index) @@ -25,7 +14,6 @@ process STRELKA_GERMLINE { path target_bed path target_bed_tbi - output: tuple val(meta), path("*variants.vcf.gz") , emit: vcf tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi @@ -34,14 +22,15 @@ process STRELKA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def regions = target_bed ? "--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ --bam $input \\ --referenceFasta $fasta \\ $regions \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -51,8 +40,8 @@ process STRELKA_GERMLINE { mv strelka/results/variants/variants.vcf.gz.tbi ${prefix}.variants.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaGermlineWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/strelka/somatic/functions.nf b/modules/strelka/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 633b0a2c..fa138633 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1" - } else { - container "quay.io/biocontainers/strelka:2.9.10--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1' : + 'quay.io/biocontainers/strelka:2.9.10--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) @@ -33,7 +22,8 @@ process STRELKA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : "" def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ @@ -43,7 +33,7 @@ process STRELKA_SOMATIC { --referenceFasta $fasta \\ $options_target_bed \\ $options_manta \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -54,8 +44,8 @@ process STRELKA_SOMATIC { mv strelka/results/variants/somatic.snvs.vcf.gz.tbi ${prefix}.somatic_snvs.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaSomaticWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/stringtie/merge/functions.nf b/modules/stringtie/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files 
- options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index 371533bb..756dc6ec 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE_MERGE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: path stringtie_gtf @@ -27,6 +16,7 @@ process STRINGTIE_MERGE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ stringtie \\ --merge $stringtie_gtf \\ @@ -34,8 +24,8 @@ process STRINGTIE_MERGE { -o stringtie.merged.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/stringtie/stringtie/functions.nf b/modules/stringtie/stringtie/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/stringtie/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 3579e47c..4367a84d 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: tuple val(meta), path(bam) @@ -30,7 +19,8 @@ process STRINGTIE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -48,11 +38,11 @@ process STRINGTIE { -C ${prefix}.coverage.gtf \\ -b ${prefix}.ballgown \\ -p $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/subread/featurecounts/functions.nf b/modules/subread/featurecounts/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/subread/featurecounts/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 0a0285db..43a7f8cd 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SUBREAD_FEATURECOUNTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::subread=2.0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0" - } else { - container "quay.io/biocontainers/subread:2.0.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0' : + 'quay.io/biocontainers/subread:2.0.1--hed695b0_0' }" input: tuple val(meta), path(bams), path(annotation) @@ -27,7 +16,8 @@ process SUBREAD_FEATURECOUNTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def paired_end = meta.single_end ? '' : '-p' def strandedness = 0 @@ -38,7 +28,7 @@ process SUBREAD_FEATURECOUNTS { } """ featureCounts \\ - $options.args \\ + $args \\ $paired_end \\ -T $task.cpus \\ -a $annotation \\ @@ -47,8 +37,8 @@ process SUBREAD_FEATURECOUNTS { ${bams.join(' ')} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") + "${task.process}": + subread: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") END_VERSIONS """ } diff --git a/modules/tabix/bgzip/functions.nf b/modules/tabix/bgzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 43726f17..13f9a942 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) @@ -26,13 +15,14 @@ process TABIX_BGZIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.${input.getExtension()}.gz + bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/bgziptabix/functions.nf b/modules/tabix/bgziptabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgziptabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index e44a7226..9a633d2e 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIPTABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) @@ -26,14 +15,16 @@ process TABIX_BGZIPTABIX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.gz - tabix $options.args2 ${prefix}.gz + bgzip -c $args $input > ${prefix}.gz + tabix $args2 ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/tabix/functions.nf b/modules/tabix/tabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/tabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index 1574c0b5..c721a554 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_TABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(tab) @@ -26,12 +15,13 @@ process TABIX_TABIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - tabix $options.args $tab + tabix $args $tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tbprofiler/profile/functions.nf b/modules/tbprofiler/profile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tbprofiler/profile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf index afd78b05..3f6bffc3 100644 --- a/modules/tbprofiler/profile/main.nf +++ b/modules/tbprofiler/profile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TBPROFILER_PROFILE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::tb-profiler=3.0.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0" - } else { - container "quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0' : + 'quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0' }" input: tuple val(meta), path(reads) @@ -30,19 +19,20 @@ process TBPROFILER_PROFILE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def input_reads = meta.single_end ? "--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" """ tb-profiler \\ profile \\ - $options.args \\ + $args \\ --prefix ${prefix} \\ --threads $task.cpus \\ $input_reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') + "${task.process}": + tbprofiler: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') END_VERSIONS """ } diff --git a/modules/tiddit/cov/functions.nf b/modules/tiddit/cov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/cov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - 
-// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index a3a8a171..e9bb9b5d 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_COV { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -25,25 +14,23 @@ process TIDDIT_COV { output: tuple val(meta), path("*.tab"), optional: true, emit: cov tuple val(meta), path("*.wig"), optional: true, emit: wig - path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta ? 
"--ref $fasta" : "" """ tiddit \\ --cov \\ -o $prefix \\ - $options.args \\ + $args \\ --bam $bam \\ $reference cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tiddit/sv/functions.nf b/modules/tiddit/sv/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/sv/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - 
- // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index e262221a..83a46f82 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_SV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -30,19 +19,20 @@ process TIDDIT_SV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ tiddit \\ --sv \\ - $options.args \\ + $args \\ --bam $bam \\ $reference \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/trimgalore/functions.nf b/modules/trimgalore/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/trimgalore/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 8e77f1f7..86761ad8 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TRIMGALORE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::trim-galore=0.6.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0' : + 'quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,6 +19,7 @@ process TRIMGALORE { tuple val(meta), path("*.zip") , emit: zip optional true script: + def args = task.ext.args ?: '' // Calculate number of --cores for TrimGalore based on value of task.cpus // See: https://github.com/FelixKrueger/TrimGalore/blob/master/Changelog.md#version-060-release-on-1-mar-2019 // See: https://github.com/nf-core/atacseq/pull/65 @@ -48,20 +38,20 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --gzip \\ $c_r1 \\ $tpc_r1 \\ ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ @@ -70,7 +60,7 @@ process TRIMGALORE { [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --paired \\ --gzip \\ @@ -81,8 +71,8 @@ process TRIMGALORE { ${prefix}_1.fastq.gz \\ ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ diff --git a/modules/ucsc/bed12tobigbed/functions.nf b/modules/ucsc/bed12tobigbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bed12tobigbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 81f39a6f..937eabd6 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BED12TOBIGBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } 
conda (params.enable_conda ? "bioconda::ucsc-bedtobigbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1' }" input: tuple val(meta), path(bed) @@ -29,7 +18,8 @@ process UCSC_BED12TOBIGBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedToBigBed \\ $bed \\ @@ -37,8 +27,8 @@ process UCSC_BED12TOBIGBED { ${prefix}.bigBed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedclip/functions.nf b/modules/ucsc/bedclip/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bedclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: 
'' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index 5fbc2b3b..1d46342c 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bedclip=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedClip \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDCLIP { ${prefix}.bedGraph cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedgraphtobigwig/functions.nf b/modules/ucsc/bedgraphtobigwig/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bedgraphtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index f55cdb07..e18b41bc 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDGRAPHTOBIGWIG { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ucsc-bedgraphtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDGRAPHTOBIGWIG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ bedGraphToBigWig \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDGRAPHTOBIGWIG { ${prefix}.bigWig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bigwigaverageoverbed/functions.nf b/modules/ucsc/bigwigaverageoverbed/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bigwigaverageoverbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) 
{ - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 72491443..8c6f1178 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BIGWIGAVERAGEOVERBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bigwigaverageoverbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' }" input: tuple val(meta), path(bed) @@ -29,18 +18,19 @@ process UCSC_BIGWIGAVERAGEOVERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + // BUG: bigWigAverageOverBed cannot handle ensembl seqlevels style """ - # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. bigWigAverageOverBed \\ - $options.args \\ + $args \\ $bigwig \\ $bed \\ ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/liftover/functions.nf b/modules/ucsc/liftover/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/liftover/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - 
return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf index 3739a1e5..1c667262 100644 --- a/modules/ucsc/liftover/main.nf +++ b/modules/ucsc/liftover/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_LIFTOVER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-liftover=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3" - } else { - container "quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3' : + 'quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3' }" input: tuple val(meta), path(bed) @@ -30,19 +19,20 @@ process UCSC_LIFTOVER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ liftOver \\ - $options.args \ + $args \ $bed \\ $chain \\ ${prefix}.lifted.bed \\ ${prefix}.unlifted.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/wigtobigwig/functions.nf b/modules/ucsc/wigtobigwig/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/wigtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index d03a2c4a..4c596c9a 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -1,45 +1,34 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' // No version information printed +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_WIGTOBIGWIG { tag '$wig' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::ucsc-wigtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2' }" input: path wig - path chromsizes + path sizes output: - path "*.bw" , emit: bw - path "versions.yml" , emit: versions + path "*.bw" , emit: bw + path "versions.yml", emit: versions script: - + def args = task.ext.args ?: '' """ wigToBigWig \\ - $options.args \\ + $args \\ $wig \\ - $chromsizes \\ + $sizes \\ ${wig.getSimpleName()}.bw cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ultra/pipeline/functions.nf b/modules/ultra/pipeline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ultra/pipeline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index b61518e6..5df34121 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ULTRA_PIPELINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" - } else { - container "quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' : + 'quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' }" input: tuple val(meta), path(reads) @@ -28,21 +17,22 @@ process ULTRA_PIPELINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" """ uLTRA \\ pipeline \\ --t $task.cpus \\ --prefix $prefix \\ - $options.args \\ + $args \\ $genome \\ $gtf \\ $reads \\ ./ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( uLTRA --version|sed 's/uLTRA //g' ) + "${task.process}": + ultra: \$( uLTRA --version|sed 's/uLTRA //g' ) END_VERSIONS """ } diff --git a/modules/umitools/dedup/functions.nf b/modules/umitools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 0ec9741b..287bb8c2 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_DEDUP { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,18 +15,19 @@ process UMITOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" def paired = meta.single_end ? "" : "--paired" """ umi_tools dedup \\ -I $bam \\ -S ${prefix}.bam \\ $paired \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/umitools/extract/functions.nf b/modules/umitools/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and 
to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index d90a3ba8..3c2402e2 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_EXTRACT { tag "$meta.id" label "process_low" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process UMITOOLS_EXTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ umi_tools \\ extract \\ -I $reads \\ -S ${prefix}.umi_extract.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } else { @@ -50,12 +40,12 @@ process UMITOOLS_EXTRACT { --read2-in=${reads[1]} \\ -S ${prefix}.umi_extract_1.fastq.gz \\ --read2-out=${prefix}.umi_extract_2.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/unicycler/functions.nf b/modules/unicycler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unicycler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir 
= args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 3629d730..14319dc1 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNICYCLER { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::unicycler=0.4.8' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3" - } else { - container "quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3' : + 'quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3' }" input: tuple val(meta), path(shortreads), path(longreads) @@ -28,13 +17,14 @@ process UNICYCLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" def long_reads = longreads ? "-l $longreads" : "" """ unicycler \\ --threads $task.cpus \\ - $options.args \\ + $args \\ $short_reads \\ $long_reads \\ --out ./ @@ -46,8 +36,8 @@ process UNICYCLER { mv unicycler.log ${prefix}.unicycler.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') + "${task.process}": + unicycler: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/untar/functions.nf b/modules/untar/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/untar/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths 
= paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/untar/main.nf b/modules/untar/main.nf index efb9d825..6d1996e7 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNTAR { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: path archive @@ -26,16 +15,19 @@ process UNTAR { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' untar = archive.toString() - '.tar.gz' """ tar \\ -xzvf \\ - $options.args \\ - $archive + $args \\ + $archive \\ + $args2 \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/unzip/functions.nf b/modules/unzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index f39e75e8..294ac0b0 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::p7zip=15.09" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4" - } else { - container "quay.io/biocontainers/p7zip:15.09--h2d50403_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4' : + 'quay.io/biocontainers/p7zip:15.09--h2d50403_4' }" input: path archive @@ -27,17 +15,17 @@ process UNZIP { path "versions.yml" , emit: versions script: - + def args = task.ext.args ?: '' if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." 
} """ 7za \\ e \\ -o"${archive.baseName}"/ \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') END_VERSIONS """ diff --git a/modules/variantbam/functions.nf b/modules/variantbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/variantbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index e73b8bf1..3d354016 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -1,43 +1,33 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.4.4a' +def VERSION = '1.4.4a' // Version information not provided by tool on CLI process VARIANTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::variantbam=1.4.4a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5" - } else { - container "quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5' : + 'quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5' }" input: tuple val(meta), path(bam) output: - tuple val(meta), path("*.bam") , emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ variant \\ $bam \\ -o ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + variantbam: $VERSION END_VERSIONS """ } diff --git a/modules/vcftools/functions.nf b/modules/vcftools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/vcftools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 768d5a23..62fff0cf 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process VCFTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::vcftools=0.1.16" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4" - } else { - container "quay.io/biocontainers/vcftools:0.1.16--he513fc3_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4' : + 'quay.io/biocontainers/vcftools:0.1.16--he513fc3_4' }" input: // Owing to the nature of vcftools we here provide solutions to working with optional bed files and optional @@ -93,22 +82,23 @@ process VCFTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args_list = args.tokenize() - def bed_arg = (options.args.contains('--bed')) ? "--bed ${bed}" : - (options.args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : - (options.args.contains('--hapcount')) ? "--hapcount ${bed}" : '' - args.removeIf { it.contains('--bed') } - args.removeIf { it.contains('--exclude-bed') } - args.removeIf { it.contains('--hapcount') } + def bed_arg = (args.contains('--bed')) ? "--bed ${bed}" : + (args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : + (args.contains('--hapcount')) ? "--hapcount ${bed}" : '' + args_list.removeIf { it.contains('--bed') } + args_list.removeIf { it.contains('--exclude-bed') } + args_list.removeIf { it.contains('--hapcount') } - def diff_variant_arg = (options.args.contains('--diff')) ? "--diff ${diff_variant_file}" : - (options.args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : - (options.args.contains('--diff-bcf')) ? "--diff-bcf ${diff_variant_file}" : '' - args.removeIf { it.contains('--diff') } - args.removeIf { it.contains('--gzdiff') } - args.removeIf { it.contains('--diff-bcf') } + def diff_variant_arg = (args.contains('--diff')) ? "--diff ${diff_variant_file}" : + (args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : + (args.contains('--diff-bcf')) ? "--diff-bcf ${diff_variant_file}" : '' + args_list.removeIf { it.contains('--diff') } + args_list.removeIf { it.contains('--gzdiff') } + args_list.removeIf { it.contains('--diff-bcf') } def input_file = ("$variant_file".endsWith(".vcf")) ? "--vcf ${variant_file}" : ("$variant_file".endsWith(".vcf.gz")) ? 
"--gzvcf ${variant_file}" : @@ -118,13 +108,13 @@ process VCFTOOLS { vcftools \\ $input_file \\ --out $prefix \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $bed_arg \\ $diff_variant_arg cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') + "${task.process}": + vcftools: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') END_VERSIONS """ } diff --git a/modules/yara/index/functions.nf b/modules/yara/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map 
args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index 51ae8a32..77122c78 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::yara=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/yara:1.0.2--2" - } else { - container "quay.io/biocontainers/yara:1.0.2--2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/yara:1.0.2--2' : + 'quay.io/biocontainers/yara:1.0.2--2' }" input: path fasta @@ -26,6 +15,7 @@ process YARA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir yara @@ -38,8 +28,8 @@ process YARA_INDEX { cp $fasta yara/yara.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/yara/mapper/functions.nf b/modules/yara/mapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/mapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 3d69674c..6e7f433b 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_MAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::yara=1.0.2 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } else { - container "quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' : + 'quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process YARA_MAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" if (meta.single_end) { """ yara_mapper \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f bam \\ ${index}/yara \\ $reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ yara_mapper \\ - $options.args \\ - -t ${task.cpus} \\ + $args \\ + -t $task.cpus \\ -f bam \\ ${index}/yara \\ ${reads[0]} \\ @@ -57,8 +47,8 @@ process YARA_MAPPER { samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index cd22dde8..741edf5e 100644 --- a/tests/config/nextflow.config +++ 
b/tests/config/nextflow.config @@ -6,9 +6,9 @@ params { } process { - cpus = 2 - memory = 3.GB - time = 2.h + cpus = 2 + memory = 3.GB + time = 2.h } if ("$PROFILE" == "singularity") { @@ -28,5 +28,5 @@ conda { createTimeout = "120 min" } includeConfig 'test_data.config' manifest { - nextflowVersion = '!>=21.04.0' + nextflowVersion = '!>=21.10.3' } diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d68d64d0..aa59b7c9 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -305,7 +305,7 @@ cooler/digest: cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** - + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** @@ -349,7 +349,7 @@ dastool/dastool: dastool/scaffolds2bin: - modules/dastool/scaffolds2bin/** - tests/modules/dastool/scaffolds2bin/** - + dedup: - modules/dedup/** - tests/modules/dedup/** @@ -490,7 +490,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: &gatk4_calculatecontamination +gatk4/calculatecontamination: #&gatk4_calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -498,7 +498,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: &gatk4_createsomaticpanelofnormals +gatk4/createsomaticpanelofnormals: #&gatk4_createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -510,11 +510,11 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: &gatk4_filtermutectcalls +gatk4/filtermutectcalls: #&gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: &gatk4_genomicsdbimport +gatk4/genomicsdbimport: 
#&gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -522,7 +522,7 @@ gatk4/genotypegvcfs: - modules/gatk4/genotypegvcfs/** - tests/modules/gatk4/genotypegvcfs/** -gatk4/getpileupsummaries: &gatk4_getpileupsummaries +gatk4/getpileupsummaries: #&gatk4_getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -538,7 +538,7 @@ gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: &gatk4_learnreadorientationmodel +gatk4/learnreadorientationmodel: #&gatk4_learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - tests/modules/gatk4/learnreadorientationmodel/** @@ -554,7 +554,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: &gatk4_mutect2 +gatk4/mutect2: #&gatk4_mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -746,13 +746,13 @@ kraken2/kraken2: - modules/untar/** - tests/modules/kraken2/kraken2/** -kronatools/kronadb: - - modules/kronatools/kronadb/** - - tests/modules/kronatools/kronadb/** +krona/kronadb: + - modules/krona/kronadb/** + - tests/modules/krona/kronadb/** -kronatools/ktimporttaxonomy: - - modules/kronatools/ktimporttaxonomy/** - - tests/modules/kronatools/ktimporttaxonomy/** +krona/ktimporttaxonomy: + - modules/krona/ktimporttaxonomy/** + - tests/modules/krona/ktimporttaxonomy/** last/dotplot: - modules/last/dotplot/** @@ -1063,6 +1063,10 @@ plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +plink2/vcf: + - modules/plink2/vcf/** + - tests/modules/plink2/vcf/** + pmdtools/filter: - modules/pmdtools/filter/** - tests/modules/pmdtools/filter/** @@ -1211,7 +1215,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: &samtools_index +samtools/index: #&samtools_index - modules/samtools/index/** - tests/modules/samtools/index/** 
@@ -1223,7 +1227,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: &samtools_sort +samtools/sort: #&samtools_sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1246,7 +1250,7 @@ seacr/callpeak: seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** - + seqsero2: - modules/seqsero2/** - tests/modules/seqsero2/** @@ -1299,11 +1303,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: &sratools_fasterqdump +sratools/fasterqdump: #&sratools_fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: &sratools_prefetch +sratools/prefetch: #&sratools_prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1423,47 +1427,47 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** -subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools - - subworkflows/nf-core/bam_stats_samtools/** - - tests/subworkflows/nf-core/bam_stats_samtools/** +# subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools +# - subworkflows/nf-core/bam_stats_samtools/** +# - tests/subworkflows/nf-core/bam_stats_samtools/** -subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools - - subworkflows/nf-core/bam_sort_samtools/** - - tests/subworkflows/nf-core/bam_sort_samtools/** - - *samtools_sort - - *samtools_index - - *subworkflows_bam_stats_samtools +# subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools +# - subworkflows/nf-core/bam_sort_samtools/** +# - tests/subworkflows/nf-core/bam_sort_samtools/** +# - *samtools_sort +# - *samtools_index +# - *subworkflows_bam_stats_samtools -subworkflows/align_bowtie2: - - subworkflows/nf-core/align_bowtie2/** - - tests/subworkflows/nf-core/align_bowtie2/** - - *subworkflows_bam_sort_samtools +# subworkflows/align_bowtie2: +# - subworkflows/nf-core/align_bowtie2/** +# - tests/subworkflows/nf-core/align_bowtie2/** +# 
- *subworkflows_bam_sort_samtools -subworkflows/sra_fastq: - - subworkflows/nf-core/sra_fastq/** - - tests/subworkflows/nf-core/sra_fastq/** - - *sratools_fasterqdump - - *sratools_prefetch +# subworkflows/sra_fastq: +# - subworkflows/nf-core/sra_fastq/** +# - tests/subworkflows/nf-core/sra_fastq/** +# - *sratools_fasterqdump +# - *sratools_prefetch -subworkflows/gatk_create_som_pon: - - subworkflows/nf-core/gatk_create_som_pon/** - - tests/subworkflows/nf-core/gatk_create_som_pon/** - - *gatk4_genomicsdbimport - - *gatk4_createsomaticpanelofnormals +# subworkflows/gatk_create_som_pon: +# - subworkflows/nf-core/gatk_create_som_pon/** +# - tests/subworkflows/nf-core/gatk_create_som_pon/** +# - *gatk4_genomicsdbimport +# - *gatk4_createsomaticpanelofnormals -subworkflows/gatk_tumor_normal_somatic_variant_calling: - - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** - - *gatk4_mutect2 - - *gatk4_learnreadorientationmodel - - *gatk4_getpileupsummaries - - *gatk4_calculatecontamination - - *gatk4_filtermutectcalls - -subworkflows/gatk_tumor_only_somatic_variant_calling: - - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** - - *gatk4_mutect2 - - *gatk4_getpileupsummaries - - *gatk4_calculatecontamination - - *gatk4_filtermutectcalls +# subworkflows/gatk_tumor_normal_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - *gatk4_mutect2 +# - *gatk4_learnreadorientationmodel +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls + +# subworkflows/gatk_tumor_only_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - 
*gatk4_mutect2 +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 0c7ce2fc..31e17618 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -221,8 +221,8 @@ params { test_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test.narrowPeak" test2_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test2.narrowPeak" - test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_1.fastq.gz" - test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_2.fastq.gz" + test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R1_001.fastq.gz" + test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R2_001.fastq.gz" test_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test.yak" test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" @@ -252,6 +252,56 @@ params { filelist = "${test_data_dir}/genomics/homo_sapiens/pacbio/txt/filelist.txt" } } + 'bacteroides_fragilis' { + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz" + genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf" + } + 'illumina' { + test1_contigs_fa_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" + test1_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" + test1_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" + test2_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" + test2_2_fastq_gz = 
"${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + test1_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.bam" + test1_paired_end_sorted_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam" + test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" + test2_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.bam" + test2_paired_end_sorted_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam" + test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" + overlap_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/overlap.paf" + } + } + 'candidatus_portiera_aleyrodidarum' { + 'genome' { + genome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.fasta" + genome_sizes = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.sizes" + genome_aln_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.nwk" + proteome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/proteome.fasta" + test1_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test1.gff" + test2_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test2.gff" + test3_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test3.gff" + 
} + 'illumina' { + test_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fasta/test_1.fastq.gz" + test_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_2.fastq.gz" + test_se_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_se.fastq.gz" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/nanopore/fastq/test.fastq.gz" + } + } + 'haemophilus_influenzae' { + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.fna.gz" + genome_aln_gz = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.nwk" + } + } 'generic' { 'csv' { test_csv = "${test_data_dir}/generic/csv/test.csv" @@ -282,28 +332,5 @@ params { } } - 'bacteroides_fragilis'{ - 'genome' { - genome_fna_gz = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.fna.gz" - genome_paf = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.paf" - } - 'illumina' { - test1_contigs_fa_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" - test1_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" - test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" - test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" - test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" - test1_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.bam" - test1_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam" - test1_paired_end_sorted_bam_bai = 
"${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" - test2_paired_end_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.bam" - test2_paired_end_sorted_bam = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam" - test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" - } - 'nanopore' { - test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" - overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" - } - } } } diff --git a/tests/modules/abacas/main.nf b/tests/modules/abacas/main.nf index dc58ed61..542a67af 100644 --- a/tests/modules/abacas/main.nf +++ b/tests/modules/abacas/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ABACAS } from '../../../modules/abacas/main.nf' addParams ( options: ['args' : '-m -p nucmer'] ) +include { ABACAS } from '../../../modules/abacas/main.nf' workflow test_abacas { diff --git a/tests/modules/abacas/nextflow.config b/tests/modules/abacas/nextflow.config new file mode 100644 index 00000000..17296503 --- /dev/null +++ b/tests/modules/abacas/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ABACAS { + ext.args = '-m -p nucmer' + } + +} diff --git a/tests/modules/abacas/test.yml b/tests/modules/abacas/test.yml index 899bc4db..c466a6ed 100644 --- a/tests/modules/abacas/test.yml +++ b/tests/modules/abacas/test.yml @@ -1,5 +1,5 @@ - name: abacas - command: nextflow run ./tests/modules/abacas -entry test_abacas -c tests/config/nextflow.config + command: nextflow run ./tests/modules/abacas -entry test_abacas -c ./tests/config/nextflow.config -c ./tests/modules/abacas/nextflow.config tags: - abacas files: diff --git a/tests/modules/adapterremoval/main.nf b/tests/modules/adapterremoval/main.nf index 9dd37aa9..ee7f1c44 
100644 --- a/tests/modules/adapterremoval/main.nf +++ b/tests/modules/adapterremoval/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' addParams( options: [:] ) +include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' workflow test_adapterremoval_single_end { input = [ [ id:'test', single_end:true, collapse:false ], // meta map diff --git a/tests/modules/adapterremoval/nextflow.config b/tests/modules/adapterremoval/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/adapterremoval/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 318e7866..a6c4a6cf 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -1,5 +1,5 @@ - name: adapterremoval test_adapterremoval_single_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_single_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: @@ -9,7 +9,7 @@ md5sum: 62139afee94defad5b83bdd0b8475a1f - name: adapterremoval test_adapterremoval_paired_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: @@ -21,7 +21,7 @@ md5sum: de7b38e2c881bced8671acb1ab452d78 - name: adapterremoval test_adapterremoval_paired_end_collapse - command: nextflow run 
tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: diff --git a/tests/modules/agrvate/main.nf b/tests/modules/agrvate/main.nf index 58058fe3..ac682bef 100644 --- a/tests/modules/agrvate/main.nf +++ b/tests/modules/agrvate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { AGRVATE } from '../../../modules/agrvate/main.nf' addParams( options: ["args": "--mummer"] ) +include { AGRVATE } from '../../../modules/agrvate/main.nf' workflow test_agrvate { diff --git a/tests/modules/agrvate/nextflow.config b/tests/modules/agrvate/nextflow.config new file mode 100644 index 00000000..7f127e5e --- /dev/null +++ b/tests/modules/agrvate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: AGRVATE { + ext.args = '--mummer' + } + +} diff --git a/tests/modules/agrvate/test.yml b/tests/modules/agrvate/test.yml index ec413663..36e8886c 100644 --- a/tests/modules/agrvate/test.yml +++ b/tests/modules/agrvate/test.yml @@ -1,5 +1,5 @@ - name: agrvate - command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c ./tests/config/nextflow.config -c ./tests/modules/agrvate/nextflow.config tags: - agrvate files: diff --git a/tests/modules/allelecounter/main.nf b/tests/modules/allelecounter/main.nf index b938ab94..3fe11be3 100644 --- a/tests/modules/allelecounter/main.nf +++ b/tests/modules/allelecounter/main.nf @@ -1,7 +1,7 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' addParams( options: [:] ) +include { ALLELECOUNTER } from 
'../../../modules/allelecounter/main.nf' workflow test_allelecounter_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/allelecounter/nextflow.config b/tests/modules/allelecounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/allelecounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/allelecounter/test.yml b/tests/modules/allelecounter/test.yml index bbef0ecc..a0afbc12 100644 --- a/tests/modules/allelecounter/test.yml +++ b/tests/modules/allelecounter/test.yml @@ -1,5 +1,5 @@ - name: allelecounter test_allelecounter_bam - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_bam -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: @@ -7,7 +7,7 @@ md5sum: 2bbe9d7331b78bdac30fe30dbc5fdaf3 - name: allelecounter test_allelecounter_cram - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_cram -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: diff --git a/tests/modules/amps/main.nf b/tests/modules/amps/main.nf index 7d7a40d1..15572096 100644 --- a/tests/modules/amps/main.nf +++ b/tests/modules/amps/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } 
from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [args: "-f def_anc"] ) -include { AMPS } from '../../../modules/amps/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' +include { AMPS } from '../../../modules/amps/main.nf' workflow test_amps { diff --git a/tests/modules/amps/nextflow.config b/tests/modules/amps/nextflow.config new file mode 100644 index 00000000..b58ac3fe --- /dev/null +++ b/tests/modules/amps/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MALTEXTRACT { + ext.args = '-f def_anc' + } + +} diff --git a/tests/modules/amps/test.yml b/tests/modules/amps/test.yml index 04691f18..f38320e4 100644 --- a/tests/modules/amps/test.yml +++ b/tests/modules/amps/test.yml @@ -1,5 +1,5 @@ - name: amps - command: nextflow run ./tests/modules/amps -entry test_amps -c tests/config/nextflow.config + command: nextflow run ./tests/modules/amps -entry test_amps -c ./tests/config/nextflow.config -c ./tests/modules/amps/nextflow.config tags: - amps files: diff --git a/tests/modules/arriba/main.nf b/tests/modules/arriba/main.nf index 833742d6..60741275 100644 --- a/tests/modules/arriba/main.nf +++ b/tests/modules/arriba/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 11'] ) -include { STAR_ALIGN } from '../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat 
--outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) -include { ARRIBA } from '../../../modules/arriba/main.nf' addParams( options: [:] ) +include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../modules/star/align/main.nf' +include { ARRIBA } from '../../../modules/arriba/main.nf' workflow test_arriba_single_end { @@ -14,9 +14,12 @@ workflow test_arriba_single_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } @@ -29,8 +32,11 @@ workflow test_arriba_paired_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, 
seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } diff --git a/tests/modules/arriba/nextflow.config b/tests/modules/arriba/nextflow.config new file mode 100644 index 00000000..1b66d8df --- /dev/null +++ b/tests/modules/arriba/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 11' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + +} diff --git a/tests/modules/arriba/test.yml b/tests/modules/arriba/test.yml index c1dc7c1e..52743167 100644 --- a/tests/modules/arriba/test.yml +++ b/tests/modules/arriba/test.yml @@ -1,5 +1,5 @@ - name: arriba test_arriba_single_end - command: nextflow run tests/modules/arriba -entry test_arriba_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_single_end -c ./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -7,46 +7,45 @@ md5sum: cad8c215b938d1e45b747a5b7898a4c2 - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: 
output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: 29c99195dcc79ff4df1f754ff16aac78 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - name: arriba test_arriba_paired_end - command: nextflow run tests/modules/arriba -entry test_arriba_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_paired_end -c 
./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -54,39 +53,38 @@ md5sum: 85e36c887464e4deaa65f45174d3b8fd - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab 
md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: d724ca90a102347b9c5052a33ea4d308 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out diff --git a/tests/modules/artic/guppyplex/main.nf b/tests/modules/artic/guppyplex/main.nf index 972a6e66..89f67c74 100644 --- a/tests/modules/artic/guppyplex/main.nf +++ b/tests/modules/artic/guppyplex/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' addParams( options: [:] ) +include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' process STAGE_FASTQ_DIR { input: diff --git a/tests/modules/artic/guppyplex/nextflow.config b/tests/modules/artic/guppyplex/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/artic/guppyplex/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/guppyplex/test.yml b/tests/modules/artic/guppyplex/test.yml index 133f0b15..6fd10898 100644 --- a/tests/modules/artic/guppyplex/test.yml +++ b/tests/modules/artic/guppyplex/test.yml @@ -1,5 +1,5 @@ - name: artic guppyplex - command: nextflow run tests/modules/artic/guppyplex -entry test_artic_guppyplex -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/guppyplex -entry test_artic_guppyplex -c ./tests/config/nextflow.config -c ./tests/modules/artic/guppyplex/nextflow.config tags: - artic - artic/guppyplex diff --git a/tests/modules/artic/minion/main.nf b/tests/modules/artic/minion/main.nf index f4993289..ca66ede0 100644 --- a/tests/modules/artic/minion/main.nf +++ b/tests/modules/artic/minion/main.nf @@ -3,17 +3,19 @@ 
nextflow.enable.dsl = 2 include { UNTAR } from '../../../../modules/untar/main.nf' -include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' addParams( fast5_dir: true, sequencing_summary: true, artic_minion_medaka_model:false ) +include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' workflow test_artic_minion { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - fast5_tar = [ file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) ] - sequencing_summary = [ file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) ] - fasta = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) ] - bed = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) ] - dummy_file = [ ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + fast5_tar = file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) + sequencing_summary = file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) + fasta = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) + bed = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) + dummy_file = [] fast5_dir = UNTAR ( fast5_tar ).untar diff --git a/tests/modules/artic/minion/nextflow.config b/tests/modules/artic/minion/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/artic/minion/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/minion/test.yml b/tests/modules/artic/minion/test.yml index b3c5f0f1..8b36b224 100644 --- a/tests/modules/artic/minion/test.yml +++ b/tests/modules/artic/minion/test.yml @@ -1,5 +1,5 @@ - name: artic minion - command: nextflow run tests/modules/artic/minion -entry test_artic_minion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/minion -entry test_artic_minion -c ./tests/config/nextflow.config -c ./tests/modules/artic/minion/nextflow.config tags: - artic - artic/minion diff --git a/tests/modules/assemblyscan/main.nf b/tests/modules/assemblyscan/main.nf index 6f3cbb5e..7cd5f393 100644 --- a/tests/modules/assemblyscan/main.nf +++ b/tests/modules/assemblyscan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' addParams( options: [:] ) +include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' workflow test_assemblyscan { diff --git a/tests/modules/assemblyscan/nextflow.config b/tests/modules/assemblyscan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/assemblyscan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/assemblyscan/test.yml b/tests/modules/assemblyscan/test.yml index 0eb4ad66..4a3ba5ec 100644 --- a/tests/modules/assemblyscan/test.yml +++ b/tests/modules/assemblyscan/test.yml @@ -1,5 +1,5 @@ - name: assemblyscan test_assemblyscan - command: nextflow run tests/modules/assemblyscan -entry test_assemblyscan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/assemblyscan -entry test_assemblyscan -c ./tests/config/nextflow.config -c ./tests/modules/assemblyscan/nextflow.config tags: - assemblyscan files: diff --git 
a/tests/modules/ataqv/ataqv/main.nf b/tests/modules/ataqv/ataqv/main.nf index 2f2a62eb..b1103350 100644 --- a/tests/modules/ataqv/ataqv/main.nf +++ b/tests/modules/ataqv/ataqv/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: [:] ) -include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' addParams( options: ['args': '--log-problematic-reads'] ) +include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' +include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' workflow test_ataqv_ataqv { diff --git a/tests/modules/ataqv/ataqv/nextflow.config b/tests/modules/ataqv/ataqv/nextflow.config new file mode 100644 index 00000000..31700510 --- /dev/null +++ b/tests/modules/ataqv/ataqv/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ATAQV_ATAQV_PROBLEM_READS { + ext.args = '--log-problematic-reads' + } + +} diff --git a/tests/modules/ataqv/ataqv/test.yml b/tests/modules/ataqv/ataqv/test.yml index 77452f6f..f9f2a888 100644 --- a/tests/modules/ataqv/ataqv/test.yml +++ b/tests/modules/ataqv/ataqv/test.yml @@ -1,5 +1,5 @@ - name: ataqv ataqv test_ataqv_ataqv - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -9,7 +9,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_problem_reads - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c ./tests/config/nextflow.config -c 
./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -21,7 +21,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_peak - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -31,7 +31,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_tss - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv @@ -41,7 +41,7 @@ - '"forward_mate_reads": 101' - name: ataqv ataqv test_ataqv_ataqv_excluded_regs - command: nextflow run tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config tags: - ataqv - ataqv/ataqv diff --git a/tests/modules/bakta/main.nf b/tests/modules/bakta/main.nf index 531099f1..1bc00622 100644 --- a/tests/modules/bakta/main.nf +++ b/tests/modules/bakta/main.nf @@ -2,12 +2,14 @@ nextflow.enable.dsl = 2 -include { BAKTA } from '../../../modules/bakta/main.nf' addParams( options: [:] ) +include { BAKTA } from '../../../modules/bakta/main.nf' workflow test_bakta { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] BAKTA ( input, [], [], [] ) } diff --git 
a/tests/modules/bakta/nextflow.config b/tests/modules/bakta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bakta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamaligncleaner/main.nf b/tests/modules/bamaligncleaner/main.nf index 94ee005f..c9d517ae 100644 --- a/tests/modules/bamaligncleaner/main.nf +++ b/tests/modules/bamaligncleaner/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' addParams( options: [:] ) +include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' workflow test_bamaligncleaner { diff --git a/tests/modules/bamaligncleaner/nextflow.config b/tests/modules/bamaligncleaner/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamaligncleaner/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamaligncleaner/test.yml b/tests/modules/bamaligncleaner/test.yml index 568925b0..4207b8c2 100644 --- a/tests/modules/bamaligncleaner/test.yml +++ b/tests/modules/bamaligncleaner/test.yml @@ -1,5 +1,5 @@ - name: bamaligncleaner - command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c ./tests/config/nextflow.config -c ./tests/modules/bamaligncleaner/nextflow.config tags: - bamaligncleaner files: diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 5538c86f..eb0bed01 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMTOOLS_SPLIT } from 
'../../../../modules/bamtools/split/main.nf' addParams( options: [args:"-reference"] ) +include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' workflow test_bamtools_split { diff --git a/tests/modules/bamtools/split/nextflow.config b/tests/modules/bamtools/split/nextflow.config new file mode 100644 index 00000000..e7de5477 --- /dev/null +++ b/tests/modules/bamtools/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BAMTOOLS_SPLIT { + ext.args = '-reference' + } + +} diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index f28a9bcf..4f52e9ce 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,5 +1,5 @@ - name: bamtools split test_bamtools_split - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config tags: - bamtools/split - bamtools diff --git a/tests/modules/bamutil/trimbam/main.nf b/tests/modules/bamutil/trimbam/main.nf index 3699756c..2967b038 100644 --- a/tests/modules/bamutil/trimbam/main.nf +++ b/tests/modules/bamutil/trimbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' addParams( options: [:] ) +include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' workflow test_bamutil_trimbam { diff --git a/tests/modules/bamutil/trimbam/nextflow.config b/tests/modules/bamutil/trimbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamutil/trimbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamutil/trimbam/test.yml b/tests/modules/bamutil/trimbam/test.yml index 95ddc3b3..443a4ded 100644 --- a/tests/modules/bamutil/trimbam/test.yml +++ b/tests/modules/bamutil/trimbam/test.yml @@ -1,5 +1,5 @@ - name: bamutil trimbam test_bamutil_trimbam - command: nextflow run tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c ./tests/config/nextflow.config -c ./tests/modules/bamutil/trimbam/nextflow.config tags: - bamutil/trimbam - bamutil diff --git a/tests/modules/bandage/image/main.nf b/tests/modules/bandage/image/main.nf index 524066b0..15f01ab1 100644 --- a/tests/modules/bandage/image/main.nf +++ b/tests/modules/bandage/image/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' addParams( options: [:] ) +include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' workflow test_bandage_image { input = [ diff --git a/tests/modules/bandage/image/nextflow.config b/tests/modules/bandage/image/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bandage/image/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bandage/image/test.yml b/tests/modules/bandage/image/test.yml index 437eca05..2abdd175 100644 --- a/tests/modules/bandage/image/test.yml +++ b/tests/modules/bandage/image/test.yml @@ -1,5 +1,5 @@ - name: bandage image - command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c ./tests/config/nextflow.config -c 
./tests/modules/bandage/image/nextflow.config tags: - bandage - bandage/image diff --git a/tests/modules/bbmap/align/main.nf b/tests/modules/bbmap/align/main.nf index c3bf43ba..c7a02e2a 100644 --- a/tests/modules/bbmap/align/main.nf +++ b/tests/modules/bbmap/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' addParams( options: [args: "unpigz=t" ] ) +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' +include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' +include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' workflow test_bbmap_align_paired_end_fasta_ref { diff --git a/tests/modules/bbmap/align/nextflow.config b/tests/modules/bbmap/align/nextflow.config new file mode 100644 index 00000000..fe0afd72 --- /dev/null +++ b/tests/modules/bbmap/align/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_ALIGN_PIGZ { + ext.args = 'unpigz=t' + } + +} diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index a30713c9..d9f9a862 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -1,5 +1,5 @@ - name: bbmap align paired end fasta ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -9,7 +9,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align 
paired end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -19,7 +19,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align @@ -29,7 +29,7 @@ - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align diff --git a/tests/modules/bbmap/bbduk/main.nf b/tests/modules/bbmap/bbduk/main.nf index 911ca391..e1f0c2de 100644 --- a/tests/modules/bbmap/bbduk/main.nf +++ b/tests/modules/bbmap/bbduk/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' addParams( options: [ 'args' : 'trimq=10 qtrim=r', 'suffix' : '.trim' ] ) +include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' workflow test_bbmap_bbduk_single_end { diff --git a/tests/modules/bbmap/bbduk/nextflow.config b/tests/modules/bbmap/bbduk/nextflow.config new file mode 100644 index 00000000..8940a9be --- /dev/null +++ b/tests/modules/bbmap/bbduk/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_BBDUK { + ext.args = 'trimq=10 qtrim=r' + ext.suffix = '.trim' + } + +} diff --git a/tests/modules/bbmap/bbduk/test.yml b/tests/modules/bbmap/bbduk/test.yml index 4d2b8604..7ab5b963 100644 --- a/tests/modules/bbmap/bbduk/test.yml +++ b/tests/modules/bbmap/bbduk/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbduk test_bbmap_bbduk_single_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -10,7 +10,7 @@ md5sum: a87d0cbd5ced7df8bf1751e4cb407482 - name: bbmap bbduk test_bbmap_bbduk_paired_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -23,7 +23,7 @@ md5sum: 406e068fbe198f02b48e7e210cc0c69f - name: bbmap bbduk test_bbmap_bbduk_se_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -34,7 +34,7 @@ md5sum: 3970e82605c7d109bb348fc94e9eecc0 - name: bbmap bbduk test_bbmap_bbduk_pe_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: diff --git 
a/tests/modules/bbmap/bbsplit/main.nf b/tests/modules/bbmap/bbsplit/main.nf index 1d3c30c1..d1236061 100644 --- a/tests/modules/bbmap/bbsplit/main.nf +++ b/tests/modules/bbmap/bbsplit/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' workflow test_bbmap_bbsplit { diff --git a/tests/modules/bbmap/bbsplit/nextflow.config b/tests/modules/bbmap/bbsplit/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/bbsplit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bbmap/bbsplit/test.yml b/tests/modules/bbmap/bbsplit/test.yml index 87bdebea..add9b519 100644 --- a/tests/modules/bbmap/bbsplit/test.yml +++ b/tests/modules/bbmap/bbsplit/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbsplit test_bbmap_bbsplit - command: nextflow run tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbsplit/nextflow.config tags: - bbmap/bbsplit - bbmap diff --git a/tests/modules/bbmap/index/main.nf b/tests/modules/bbmap/index/main.nf index 0d912615..a6f111f4 100644 --- a/tests/modules/bbmap/index/main.nf +++ b/tests/modules/bbmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) +include { BBMAP_INDEX } from 
'../../../../modules/bbmap/index/main.nf' workflow test_bbmap_index { diff --git a/tests/modules/bbmap/index/nextflow.config b/tests/modules/bbmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bbmap/index/test.yml b/tests/modules/bbmap/index/test.yml index 32684ad4..4e8d7196 100644 --- a/tests/modules/bbmap/index/test.yml +++ b/tests/modules/bbmap/index/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml bbmap/index - name: bbmap index - command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/index/nextflow.config tags: - bbmap - bbmap/index diff --git a/tests/modules/bcftools/concat/main.nf b/tests/modules/bcftools/concat/main.nf index 8869a3d7..8441d488 100644 --- a/tests/modules/bcftools/concat/main.nf +++ b/tests/modules/bcftools/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' workflow test_bcftools_concat { diff --git a/tests/modules/bcftools/concat/nextflow.config b/tests/modules/bcftools/concat/nextflow.config new file mode 100644 index 00000000..3f0d064a --- /dev/null +++ b/tests/modules/bcftools/concat/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_CONCAT { + ext.args = '--no-version' + } + +} diff --git 
a/tests/modules/bcftools/concat/test.yml b/tests/modules/bcftools/concat/test.yml index 413fe798..fee6158f 100644 --- a/tests/modules/bcftools/concat/test.yml +++ b/tests/modules/bcftools/concat/test.yml @@ -1,5 +1,5 @@ - name: bcftools concat test_bcftools_concat - command: nextflow run tests/modules/bcftools/concat -entry test_bcftools_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/concat -entry test_bcftools_concat -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config tags: - bcftools/concat - bcftools diff --git a/tests/modules/bcftools/consensus/main.nf b/tests/modules/bcftools/consensus/main.nf index 13f7b39e..ab00fbce 100644 --- a/tests/modules/bcftools/consensus/main.nf +++ b/tests/modules/bcftools/consensus/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' addParams( options: [:] ) +include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' workflow test_bcftools_consensus { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/consensus/nextflow.config b/tests/modules/bcftools/consensus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/consensus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/consensus/test.yml b/tests/modules/bcftools/consensus/test.yml index b3760fcd..7fa4ecae 100644 --- a/tests/modules/bcftools/consensus/test.yml +++ b/tests/modules/bcftools/consensus/test.yml @@ -1,5 +1,5 @@ - name: bcftools consensus test_bcftools_consensus - command: nextflow run tests/modules/bcftools/consensus -entry test_bcftools_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/consensus -entry test_bcftools_consensus -c 
./tests/config/nextflow.config -c ./tests/modules/bcftools/consensus/nextflow.config tags: - bcftools/consensus - bcftools diff --git a/tests/modules/bcftools/filter/main.nf b/tests/modules/bcftools/filter/main.nf index bd419e3a..85fbf950 100644 --- a/tests/modules/bcftools/filter/main.nf +++ b/tests/modules/bcftools/filter/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution -include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' workflow test_bcftools_filter { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/filter/nextflow.config b/tests/modules/bcftools/filter/nextflow.config new file mode 100644 index 00000000..68cac7bb --- /dev/null +++ b/tests/modules/bcftools/filter/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_FILTER { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/filter/test.yml b/tests/modules/bcftools/filter/test.yml index 0f8e48eb..da842538 100644 --- a/tests/modules/bcftools/filter/test.yml +++ b/tests/modules/bcftools/filter/test.yml @@ -1,5 +1,5 @@ - name: bcftools filter test_bcftools_filter - command: nextflow run tests/modules/bcftools/filter -entry test_bcftools_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/filter -entry test_bcftools_filter -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/filter/nextflow.config tags: - bcftools/filter - bcftools diff --git a/tests/modules/bcftools/index/main.nf b/tests/modules/bcftools/index/main.nf index 73909d66..839cd988 100644 --- a/tests/modules/bcftools/index/main.nf +++ b/tests/modules/bcftools/index/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { 
BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [:] ) -include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [args: '-t'] ) +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' workflow test_bcftools_index_csi { diff --git a/tests/modules/bcftools/index/nextflow.config b/tests/modules/bcftools/index/nextflow.config new file mode 100644 index 00000000..9a060ba2 --- /dev/null +++ b/tests/modules/bcftools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_INDEX_TBI { + ext.args = '-t' + } + +} diff --git a/tests/modules/bcftools/index/test.yml b/tests/modules/bcftools/index/test.yml index 36c5f3c0..f1a29437 100644 --- a/tests/modules/bcftools/index/test.yml +++ b/tests/modules/bcftools/index/test.yml @@ -1,5 +1,5 @@ - name: bcftools index - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index @@ -8,7 +8,7 @@ md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd - name: bcftools index tbi - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index diff --git a/tests/modules/bcftools/isec/main.nf b/tests/modules/bcftools/isec/main.nf index 1b0c2c07..0b8ffc5c 100644 --- 
a/tests/modules/bcftools/isec/main.nf +++ b/tests/modules/bcftools/isec/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' addParams( options: ['args': '--nfiles +2 --output-type z --no-version'] ) +include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' workflow test_bcftools_isec { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/isec/nextflow.config b/tests/modules/bcftools/isec/nextflow.config new file mode 100644 index 00000000..770e4674 --- /dev/null +++ b/tests/modules/bcftools/isec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_ISEC { + ext.args = '--nfiles +2 --output-type z --no-version' + } + +} diff --git a/tests/modules/bcftools/isec/test.yml b/tests/modules/bcftools/isec/test.yml index 92186c89..fc887d9d 100644 --- a/tests/modules/bcftools/isec/test.yml +++ b/tests/modules/bcftools/isec/test.yml @@ -1,5 +1,5 @@ - name: bcftools isec test_bcftools_isec - command: nextflow run tests/modules/bcftools/isec -entry test_bcftools_isec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/isec -entry test_bcftools_isec -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/isec/nextflow.config tags: - bcftools - bcftools/isec diff --git a/tests/modules/bcftools/merge/main.nf b/tests/modules/bcftools/merge/main.nf index a672a9a7..119e237a 100644 --- a/tests/modules/bcftools/merge/main.nf +++ b/tests/modules/bcftools/merge/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution -include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' addParams( options: ['args': '--force-samples --no-version'] ) +include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' workflow test_bcftools_merge { input 
= [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/merge/nextflow.config b/tests/modules/bcftools/merge/nextflow.config new file mode 100644 index 00000000..e11e50b6 --- /dev/null +++ b/tests/modules/bcftools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_MERGE { + ext.args = '--force-samples --no-version' + } + +} diff --git a/tests/modules/bcftools/merge/test.yml b/tests/modules/bcftools/merge/test.yml index d3cdd74a..6c9dd556 100644 --- a/tests/modules/bcftools/merge/test.yml +++ b/tests/modules/bcftools/merge/test.yml @@ -1,5 +1,5 @@ - name: bcftools merge test_bcftools_merge - command: nextflow run tests/modules/bcftools/merge -entry test_bcftools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/merge -entry test_bcftools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/merge/nextflow.config tags: - bcftools/merge - bcftools diff --git a/tests/modules/bcftools/mpileup/main.nf b/tests/modules/bcftools/mpileup/main.nf index 2225c5e0..813ca408 100644 --- a/tests/modules/bcftools/mpileup/main.nf +++ b/tests/modules/bcftools/mpileup/main.nf @@ -2,8 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' addParams( options: ['args2': '--no-version --ploidy 1 --multiallelic-caller', - 'args3': '--no-version' ] ) +include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' workflow test_bcftools_mpileup { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/mpileup/nextflow.config b/tests/modules/bcftools/mpileup/nextflow.config new file mode 100644 index 00000000..c21fef8d --- /dev/null +++ b/tests/modules/bcftools/mpileup/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + 
+ withName: BCFTOOLS_MPILEUP { + ext.args2 = '--no-version --ploidy 1 --multiallelic-caller' + ext.args3 = '--no-version' + } + +} diff --git a/tests/modules/bcftools/mpileup/test.yml b/tests/modules/bcftools/mpileup/test.yml index 71877e29..f081c543 100644 --- a/tests/modules/bcftools/mpileup/test.yml +++ b/tests/modules/bcftools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: bcftools mpileup test_bcftools_mpileup - command: nextflow run tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/mpileup/nextflow.config tags: - bcftools/mpileup - bcftools diff --git a/tests/modules/bcftools/norm/main.nf b/tests/modules/bcftools/norm/main.nf index 046c0b3c..ac056bea 100644 --- a/tests/modules/bcftools/norm/main.nf +++ b/tests/modules/bcftools/norm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' addParams( options: ['args': '-m -any --no-version'] ) +include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' workflow test_bcftools_norm { diff --git a/tests/modules/bcftools/norm/nextflow.config b/tests/modules/bcftools/norm/nextflow.config new file mode 100644 index 00000000..e4d27a73 --- /dev/null +++ b/tests/modules/bcftools/norm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_NORM { + ext.args = '-m -any --no-version' + } + +} diff --git a/tests/modules/bcftools/norm/test.yml b/tests/modules/bcftools/norm/test.yml index 40d0cc7e..bb4f9aca 100644 --- a/tests/modules/bcftools/norm/test.yml +++ b/tests/modules/bcftools/norm/test.yml @@ -1,5 +1,5 @@ - name: bcftools norm - command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c tests/config/nextflow.config + 
command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/norm/nextflow.config tags: - bcftools - bcftools/norm diff --git a/tests/modules/bcftools/query/main.nf b/tests/modules/bcftools/query/main.nf index a16ceddf..733cae17 100644 --- a/tests/modules/bcftools/query/main.nf +++ b/tests/modules/bcftools/query/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' addParams( options: ['args': "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" ] ) +include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' workflow test_bcftools_query { diff --git a/tests/modules/bcftools/query/nextflow.config b/tests/modules/bcftools/query/nextflow.config new file mode 100644 index 00000000..e4105006 --- /dev/null +++ b/tests/modules/bcftools/query/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_QUERY { + ext.args = "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" + } + +} diff --git a/tests/modules/bcftools/query/test.yml b/tests/modules/bcftools/query/test.yml index fbfda92b..aaa9af7b 100644 --- a/tests/modules/bcftools/query/test.yml +++ b/tests/modules/bcftools/query/test.yml @@ -1,5 +1,5 @@ - name: bcftools query - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query @@ -8,7 +8,7 @@ md5sum: c32a6d28f185822d8fe1eeb7e42ec155 - name: bcftools query with optional files - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query 
-entry test_bcftools_query_with_optional_files -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query diff --git a/tests/modules/bcftools/reheader/main.nf b/tests/modules/bcftools/reheader/main.nf index 40863331..d1dcd8b8 100644 --- a/tests/modules/bcftools/reheader/main.nf +++ b/tests/modules/bcftools/reheader/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' addParams( options: [suffix: '.updated'] ) +include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' workflow test_bcftools_reheader_update_sequences { diff --git a/tests/modules/bcftools/reheader/nextflow.config b/tests/modules/bcftools/reheader/nextflow.config new file mode 100644 index 00000000..a377b26d --- /dev/null +++ b/tests/modules/bcftools/reheader/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_REHEADER { + ext.suffix = '.updated' + } + +} diff --git a/tests/modules/bcftools/reheader/test.yml b/tests/modules/bcftools/reheader/test.yml index 78337206..1ce0b104 100644 --- a/tests/modules/bcftools/reheader/test.yml +++ b/tests/modules/bcftools/reheader/test.yml @@ -1,5 +1,5 @@ - name: bcftools reheader test_bcftools_reheader_update_sequences - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -8,7 +8,7 @@ md5sum: 9e29f28038bfce77ee00022627209ed6 - name: bcftools reheader test_bcftools_reheader_new_header - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -17,7 +17,7 @@ md5sum: f7f536d889bbf5be40243252c394ee1f - name: bcftools reheader test_bcftools_reheader_new_header_update_sequences - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools diff --git a/tests/modules/bcftools/stats/main.nf b/tests/modules/bcftools/stats/main.nf index 4039c080..808a3330 100644 --- a/tests/modules/bcftools/stats/main.nf +++ b/tests/modules/bcftools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' addParams( options: [:] ) +include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' workflow test_bcftools_stats { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/stats/nextflow.config b/tests/modules/bcftools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/stats/test.yml b/tests/modules/bcftools/stats/test.yml index cd25fe66..d3587f95 100644 --- a/tests/modules/bcftools/stats/test.yml +++ b/tests/modules/bcftools/stats/test.yml @@ -1,5 +1,5 @@ - name: bcftools stats test_bcftools_stats - command: nextflow run tests/modules/bcftools/stats -entry test_bcftools_stats -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/stats -entry test_bcftools_stats -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/stats/nextflow.config tags: - bcftools - bcftools/stats diff --git a/tests/modules/bcftools/view/main.nf b/tests/modules/bcftools/view/main.nf index a8ac3b31..f45d0284 100644 --- a/tests/modules/bcftools/view/main.nf +++ b/tests/modules/bcftools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' workflow test_bcftools_view { diff --git a/tests/modules/bcftools/view/nextflow.config b/tests/modules/bcftools/view/nextflow.config new file mode 100644 index 00000000..e1723b89 --- /dev/null +++ b/tests/modules/bcftools/view/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_VIEW { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/view/test.yml b/tests/modules/bcftools/view/test.yml index 179e9a1c..fa926dd6 100644 --- a/tests/modules/bcftools/view/test.yml +++ b/tests/modules/bcftools/view/test.yml @@ -1,5 +1,5 @@ - name: bcftools view - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view @@ -8,7 +8,7 @@ md5sum: fc178eb342a91dc0d1d568601ad8f8e2 - name: bcftools view with optional files - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c 
./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view diff --git a/tests/modules/bedtools/bamtobed/main.nf b/tests/modules/bedtools/bamtobed/main.nf index 41cf460a..e7635a3d 100644 --- a/tests/modules/bedtools/bamtobed/main.nf +++ b/tests/modules/bedtools/bamtobed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' addParams( options: [:] ) +include { BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' workflow test_bedtools_bamtobed { input = [ [ id:'test'], //meta map diff --git a/tests/modules/bedtools/bamtobed/nextflow.config b/tests/modules/bedtools/bamtobed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/bamtobed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/bamtobed/test.yml b/tests/modules/bedtools/bamtobed/test.yml index 106d125d..b038467c 100644 --- a/tests/modules/bedtools/bamtobed/test.yml +++ b/tests/modules/bedtools/bamtobed/test.yml @@ -1,5 +1,5 @@ - name: bedtools bamtobed - command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/bamtobed/nextflow.config tags: - bedtools - bedtools/bamtobed diff --git a/tests/modules/bedtools/complement/main.nf b/tests/modules/bedtools/complement/main.nf index 6456fe60..a1cca033 100644 --- a/tests/modules/bedtools/complement/main.nf +++ b/tests/modules/bedtools/complement/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' addParams( options: [suffix: '_out'] ) +include { 
BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' workflow test_bedtools_complement { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/complement/nextflow.config b/tests/modules/bedtools/complement/nextflow.config new file mode 100644 index 00000000..561fdead --- /dev/null +++ b/tests/modules/bedtools/complement/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_COMPLEMENT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/complement/test.yml b/tests/modules/bedtools/complement/test.yml index 2ebc6419..9dbeb36f 100644 --- a/tests/modules/bedtools/complement/test.yml +++ b/tests/modules/bedtools/complement/test.yml @@ -1,5 +1,5 @@ - name: bedtools complement - command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/complement/nextflow.config tags: - bedtools - bedtools/complement diff --git a/tests/modules/bedtools/genomecov/main.nf b/tests/modules/bedtools/genomecov/main.nf index 445ed078..b507a2cd 100644 --- a/tests/modules/bedtools/genomecov/main.nf +++ b/tests/modules/bedtools/genomecov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' workflow test_bedtools_genomecov_noscale { input = [ diff --git a/tests/modules/bedtools/genomecov/nextflow.config b/tests/modules/bedtools/genomecov/nextflow.config new file mode 100644 index 00000000..bc0e4aaf --- /dev/null +++ b/tests/modules/bedtools/genomecov/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_GENOMECOV { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/genomecov/test.yml b/tests/modules/bedtools/genomecov/test.yml index 477e6555..8f63bde9 100644 --- a/tests/modules/bedtools/genomecov/test.yml +++ b/tests/modules/bedtools/genomecov/test.yml @@ -1,5 +1,5 @@ - name: bedtools genomecov test_bedtools_genomecov_noscale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -8,7 +8,7 @@ md5sum: 66083198daca6c001d328ba9616e9b53 - name: bedtools genomecov test_bedtools_genomecov_nonbam_noscale - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -17,7 +17,7 @@ md5sum: f47b58840087426e5b643d8dfd155c1f - name: bedtools genomecov test_bedtools_genomecov_scale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -26,7 +26,7 @@ md5sum: 01291b6e1beab72e046653e709eb0e10 - name: bedtools genomecov test_bedtools_genomecov_nonbam_scale - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov diff --git a/tests/modules/bedtools/getfasta/main.nf b/tests/modules/bedtools/getfasta/main.nf index 194597ae..425c49d5 100644 --- a/tests/modules/bedtools/getfasta/main.nf +++ b/tests/modules/bedtools/getfasta/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' workflow test_bedtools_getfasta { bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) diff --git a/tests/modules/bedtools/getfasta/nextflow.config b/tests/modules/bedtools/getfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/getfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/getfasta/test.yml b/tests/modules/bedtools/getfasta/test.yml index adf10da5..a455f861 100644 --- a/tests/modules/bedtools/getfasta/test.yml +++ b/tests/modules/bedtools/getfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools getfasta - command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/getfasta/nextflow.config tags: - bedtools - bedtools/getfasta diff --git a/tests/modules/bedtools/intersect/main.nf b/tests/modules/bedtools/intersect/main.nf index 73a9b30c..c17d03e6 100644 --- a/tests/modules/bedtools/intersect/main.nf +++ 
b/tests/modules/bedtools/intersect/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' workflow test_bedtools_intersect { input = [ diff --git a/tests/modules/bedtools/intersect/nextflow.config b/tests/modules/bedtools/intersect/nextflow.config new file mode 100644 index 00000000..c7d0c826 --- /dev/null +++ b/tests/modules/bedtools/intersect/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_INTERSECT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/intersect/test.yml b/tests/modules/bedtools/intersect/test.yml index c8c3ad6e..86fe70cd 100644 --- a/tests/modules/bedtools/intersect/test.yml +++ b/tests/modules/bedtools/intersect/test.yml @@ -1,5 +1,5 @@ - name: bedtools intersect test_bedtools_intersect - command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect @@ -8,7 +8,7 @@ md5sum: afcbf01c2f2013aad71dbe8e34f2c15c - name: bedtools intersect test_bedtools_intersect_bam - command: nextflow run tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect diff --git a/tests/modules/bedtools/makewindows/main.nf b/tests/modules/bedtools/makewindows/main.nf index 23c40a75..ce37de72 100644 --- 
a/tests/modules/bedtools/makewindows/main.nf +++ b/tests/modules/bedtools/makewindows/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -test_options = ['args': '-w 50 '] -include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' addParams( options: test_options ) +include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' workflow test_bedtools_makewindows { - - input = [ [ id:'test'], - file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + input = [ + [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] BEDTOOLS_MAKEWINDOWS ( input, true ) } diff --git a/tests/modules/bedtools/makewindows/nextflow.config b/tests/modules/bedtools/makewindows/nextflow.config new file mode 100644 index 00000000..e8b8c3ea --- /dev/null +++ b/tests/modules/bedtools/makewindows/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MAKEWINDOWS { + ext.args = '-w 50 ' + } + +} diff --git a/tests/modules/bedtools/makewindows/test.yml b/tests/modules/bedtools/makewindows/test.yml index c39d1c08..8accaa36 100644 --- a/tests/modules/bedtools/makewindows/test.yml +++ b/tests/modules/bedtools/makewindows/test.yml @@ -1,5 +1,5 @@ - name: bedtools makewindows test_bedtools_makewindows - command: nextflow run tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/makewindows/nextflow.config tags: - bedtools/makewindows - bedtools diff --git a/tests/modules/bedtools/maskfasta/main.nf b/tests/modules/bedtools/maskfasta/main.nf index 8c30fbdc..0da02ad3 100644 --- a/tests/modules/bedtools/maskfasta/main.nf +++ b/tests/modules/bedtools/maskfasta/main.nf @@ 
-2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' workflow test_bedtools_maskfasta { bed = [ [ id:'test'], diff --git a/tests/modules/bedtools/maskfasta/nextflow.config b/tests/modules/bedtools/maskfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/maskfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/maskfasta/test.yml b/tests/modules/bedtools/maskfasta/test.yml index f536a6eb..f1e8f35a 100644 --- a/tests/modules/bedtools/maskfasta/test.yml +++ b/tests/modules/bedtools/maskfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools maskfasta - command: nextflow run ./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/maskfasta/nextflow.config tags: - bedtools - bedtools/maskfasta diff --git a/tests/modules/bedtools/merge/main.nf b/tests/modules/bedtools/merge/main.nf index f11b804a..5fca0526 100644 --- a/tests/modules/bedtools/merge/main.nf +++ b/tests/modules/bedtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' workflow test_bedtools_merge { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/merge/nextflow.config b/tests/modules/bedtools/merge/nextflow.config new file mode 100644 index 00000000..e7d635dd --- /dev/null +++ b/tests/modules/bedtools/merge/nextflow.config @@ -0,0 +1,9 @@ 
+process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MERGE { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/merge/test.yml b/tests/modules/bedtools/merge/test.yml index 62bc6860..5fc8b034 100644 --- a/tests/modules/bedtools/merge/test.yml +++ b/tests/modules/bedtools/merge/test.yml @@ -1,5 +1,5 @@ - name: bedtools merge - command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/merge/nextflow.config tags: - bedtools - bedtools/merge diff --git a/tests/modules/bedtools/slop/main.nf b/tests/modules/bedtools/slop/main.nf index 47c19781..e7136fdc 100644 --- a/tests/modules/bedtools/slop/main.nf +++ b/tests/modules/bedtools/slop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' addParams( options: [args: '-l 15 -r 30', suffix: '_out'] ) +include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' workflow test_bedtools_slop { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/slop/nextflow.config b/tests/modules/bedtools/slop/nextflow.config new file mode 100644 index 00000000..5dc03727 --- /dev/null +++ b/tests/modules/bedtools/slop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SLOP { + ext.args = '-l 15 -r 30' + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/slop/test.yml b/tests/modules/bedtools/slop/test.yml index 859b569e..0d49e66b 100644 --- a/tests/modules/bedtools/slop/test.yml +++ b/tests/modules/bedtools/slop/test.yml @@ -1,5 +1,5 @@ - name: bedtools slop - command: nextflow run ./tests/modules/bedtools/slop -entry 
test_bedtools_slop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/slop -entry test_bedtools_slop -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/slop/nextflow.config tags: - bedtools - bedtools/slop diff --git a/tests/modules/bedtools/sort/main.nf b/tests/modules/bedtools/sort/main.nf index b5d34e2f..342b4116 100644 --- a/tests/modules/bedtools/sort/main.nf +++ b/tests/modules/bedtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SORT } from '../../../../modules/bedtools/sort/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_SORT } from '../../../../modules/bedtools/sort/main.nf' workflow test_bedtools_sort { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/sort/nextflow.config b/tests/modules/bedtools/sort/nextflow.config new file mode 100644 index 00000000..6bb73232 --- /dev/null +++ b/tests/modules/bedtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SORT { + ext.suffix = '_out' + } + +} diff --git a/tests/modules/bedtools/sort/test.yml b/tests/modules/bedtools/sort/test.yml index 1dd04507..173f0587 100644 --- a/tests/modules/bedtools/sort/test.yml +++ b/tests/modules/bedtools/sort/test.yml @@ -1,5 +1,5 @@ - name: bedtools sort - command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/sort/nextflow.config tags: - bedtools - bedtools/sort diff --git a/tests/modules/bedtools/subtract/main.nf b/tests/modules/bedtools/subtract/main.nf index 9997f08c..2a0e6eab 100644 --- a/tests/modules/bedtools/subtract/main.nf +++ b/tests/modules/bedtools/subtract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SUBTRACT } from 
'../../../../modules/bedtools/subtract/main.nf' addParams( options: [:] ) +include { BEDTOOLS_SUBTRACT } from '../../../../modules/bedtools/subtract/main.nf' workflow test_bedtools_subtract { input = [ diff --git a/tests/modules/bedtools/subtract/nextflow.config b/tests/modules/bedtools/subtract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/subtract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/subtract/test.yml b/tests/modules/bedtools/subtract/test.yml index fd8660fc..52b57436 100644 --- a/tests/modules/bedtools/subtract/test.yml +++ b/tests/modules/bedtools/subtract/test.yml @@ -1,5 +1,5 @@ - name: bedtools subtract - command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/subtract/nextflow.config tags: - bedtools - bedtools/subtract diff --git a/tests/modules/bismark/align/main.nf b/tests/modules/bismark/align/main.nf index 1f1fcdce..fe6d616a 100644 --- a/tests/modules/bismark/align/main.nf +++ b/tests/modules/bismark/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_single_end' ] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_paired_end' ] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from 
'../../../../modules/bismark/align/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' // // Test with single-end data diff --git a/tests/modules/bismark/align/nextflow.config b/tests/modules/bismark/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/align/test.yml b/tests/modules/bismark/align/test.yml index 42dc44b3..ffae05af 100644 --- a/tests/modules/bismark/align/test.yml +++ b/tests/modules/bismark/align/test.yml @@ -1,19 +1,19 @@ - name: bismark align single-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_single_end/test.methylated_1_bismark_bt2.bam + - path: output/bismark/test.methylated_1_bismark_bt2.bam md5sum: dca4ba9ff705b70446f812e59bdb1a32 - - path: output/test_single_end/test.methylated_1_bismark_bt2_SE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_SE_report.txt - name: bismark align paired-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_paired_end/test.methylated_1_bismark_bt2_pe.bam + - path: output/bismark/test.methylated_1_bismark_bt2_pe.bam md5sum: 43943b1f30d056fcbd9ed26061ea0583 
- - path: output/test_paired_end/test.methylated_1_bismark_bt2_PE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_PE_report.txt diff --git a/tests/modules/bismark/deduplicate/main.nf b/tests/modules/bismark/deduplicate/main.nf index fc44745c..ad97d66a 100644 --- a/tests/modules/bismark/deduplicate/main.nf +++ b/tests/modules/bismark/deduplicate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' workflow test_bismark_deduplicate { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/deduplicate/nextflow.config b/tests/modules/bismark/deduplicate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/deduplicate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/deduplicate/test.yml b/tests/modules/bismark/deduplicate/test.yml index 604c1023..12099750 100644 --- a/tests/modules/bismark/deduplicate/test.yml +++ b/tests/modules/bismark/deduplicate/test.yml @@ -1,5 +1,5 @@ - name: bismark deduplicate test workflow - command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c ./tests/config/nextflow.config -c ./tests/modules/bismark/deduplicate/nextflow.config tags: - bismark - bismark/deduplicate diff --git a/tests/modules/bismark/genomepreparation/main.nf b/tests/modules/bismark/genomepreparation/main.nf index ab847171..a9111af3 100644 --- a/tests/modules/bismark/genomepreparation/main.nf +++ b/tests/modules/bismark/genomepreparation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 
-include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' workflow test_bismark_genomepreparation { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bismark/genomepreparation/nextflow.config b/tests/modules/bismark/genomepreparation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/genomepreparation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/genomepreparation/test.yml b/tests/modules/bismark/genomepreparation/test.yml index 15a7e7d6..a0d3c072 100644 --- a/tests/modules/bismark/genomepreparation/test.yml +++ b/tests/modules/bismark/genomepreparation/test.yml @@ -1,5 +1,5 @@ - name: bismark genomepreparation test workflow - command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c ./tests/config/nextflow.config -c ./tests/modules/bismark/genomepreparation/nextflow.config tags: - bismark - bismark/genomepreparation diff --git a/tests/modules/bismark/methylationextractor/main.nf b/tests/modules/bismark/methylationextractor/main.nf index 0b3f77a1..ed857fe8 100644 --- a/tests/modules/bismark/methylationextractor/main.nf +++ b/tests/modules/bismark/methylationextractor/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: 
[:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' workflow test_bismark_methylationextractor { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/methylationextractor/nextflow.config b/tests/modules/bismark/methylationextractor/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/methylationextractor/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/methylationextractor/test.yml b/tests/modules/bismark/methylationextractor/test.yml index 4505c428..f25b7646 100644 --- a/tests/modules/bismark/methylationextractor/test.yml +++ b/tests/modules/bismark/methylationextractor/test.yml @@ -1,5 +1,5 @@ - name: bismark methylation extractor test workflow - command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c ./tests/config/nextflow.config -c ./tests/modules/bismark/methylationextractor/nextflow.config tags: - bismark - bismark/methylationextractor diff --git a/tests/modules/bismark/report/main.nf b/tests/modules/bismark/report/main.nf index 945d24ed..f80fb2bc 100644 --- a/tests/modules/bismark/report/main.nf +++ b/tests/modules/bismark/report/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from 
'../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' workflow test_bismark_report { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/report/nextflow.config b/tests/modules/bismark/report/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/report/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/report/test.yml b/tests/modules/bismark/report/test.yml index 7e85e4dd..9195994c 100644 --- a/tests/modules/bismark/report/test.yml +++ b/tests/modules/bismark/report/test.yml @@ -1,5 +1,5 @@ - name: bismark report test workflow - command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c ./tests/config/nextflow.config -c ./tests/modules/bismark/report/nextflow.config tags: - bismark - bismark/report diff --git a/tests/modules/bismark/summary/main.nf b/tests/modules/bismark/summary/main.nf index 4170d19a..8eabe51f 100644 --- a/tests/modules/bismark/summary/main.nf +++ b/tests/modules/bismark/summary/main.nf @@ -2,11 
+2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' workflow test_bismark_summary { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/summary/nextflow.config b/tests/modules/bismark/summary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/summary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/summary/test.yml b/tests/modules/bismark/summary/test.yml index 06478873..3b5196b9 100644 --- a/tests/modules/bismark/summary/test.yml +++ b/tests/modules/bismark/summary/test.yml @@ -1,5 +1,5 @@ - name: bismark summary test workflow - command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c 
./tests/config/nextflow.config -c ./tests/modules/bismark/summary/nextflow.config tags: - bismark - bismark/summary diff --git a/tests/modules/blast/blastn/main.nf b/tests/modules/blast/blastn/main.nf index fd690dcc..3c8496dc 100644 --- a/tests/modules/blast/blastn/main.nf +++ b/tests/modules/blast/blastn/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) -include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' addParams( options: [:] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' +include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' workflow test_blast_blastn { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] diff --git a/tests/modules/blast/blastn/nextflow.config b/tests/modules/blast/blastn/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/blastn/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/blastn/test.yml b/tests/modules/blast/blastn/test.yml index 98f76921..17522c9b 100644 --- a/tests/modules/blast/blastn/test.yml +++ b/tests/modules/blast/blastn/test.yml @@ -1,5 +1,5 @@ - name: blast_blastn - command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c ./tests/config/nextflow.config -c ./tests/modules/blast/blastn/nextflow.config tags: - blast - blast/blastn diff --git a/tests/modules/blast/makeblastdb/main.nf b/tests/modules/blast/makeblastdb/main.nf index 48b39f22..9d778457 100644 --- a/tests/modules/blast/makeblastdb/main.nf +++ 
b/tests/modules/blast/makeblastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' workflow test_blast_makeblastdb { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] diff --git a/tests/modules/blast/makeblastdb/nextflow.config b/tests/modules/blast/makeblastdb/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/makeblastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/makeblastdb/test.yml b/tests/modules/blast/makeblastdb/test.yml index 7df17968..3b59f3f6 100644 --- a/tests/modules/blast/makeblastdb/test.yml +++ b/tests/modules/blast/makeblastdb/test.yml @@ -1,5 +1,5 @@ - name: blast_makeblastdb - command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c ./tests/config/nextflow.config -c ./tests/modules/blast/makeblastdb/nextflow.config tags: - blast - blast/makeblastdb diff --git a/tests/modules/bowtie/align/main.nf b/tests/modules/bowtie/align/main.nf index b2c8059a..e773cd38 100644 --- a/tests/modules/bowtie/align/main.nf +++ b/tests/modules/bowtie/align/main.nf @@ -2,13 +2,16 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [:] ) -include { BOWTIE_ALIGN } from '../../../../modules/bowtie/align/main.nf' addParams( options: [:] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' +include { BOWTIE_ALIGN } from 
'../../../../modules/bowtie/align/main.nf' workflow test_bowtie_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) @@ -16,10 +19,13 @@ workflow test_bowtie_align_single_end { } workflow test_bowtie_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) diff --git a/tests/modules/bowtie/align/nextflow.config b/tests/modules/bowtie/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/align/test.yml b/tests/modules/bowtie/align/test.yml index 76d63d68..1f8d1294 100644 --- a/tests/modules/bowtie/align/test.yml +++ b/tests/modules/bowtie/align/test.yml @@ -1,5 +1,5 @@ - name: bowtie align single-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/align -entry 
test_bowtie_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align @@ -7,36 +7,36 @@ - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out md5sum: 4b9140ceadb8a18ae9330885370f8a0b - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 - name: bowtie align paired-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align files: - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: 
./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 diff --git a/tests/modules/bowtie/build_test/main.nf b/tests/modules/bowtie/build_test/main.nf index a89091a8..7a36fb55 100644 --- a/tests/modules/bowtie/build_test/main.nf +++ b/tests/modules/bowtie/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [publish_dir:'bowtie'] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' workflow test_bowtie_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie/build_test/nextflow.config b/tests/modules/bowtie/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/build_test/test.yml b/tests/modules/bowtie/build_test/test.yml index c6b765c9..c51d1e8a 100644 --- a/tests/modules/bowtie/build_test/test.yml +++ b/tests/modules/bowtie/build_test/test.yml @@ -1,5 +1,5 @@ - name: bowtie build - command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/build/nextflow.config tags: - bowtie - bowtie/build diff --git a/tests/modules/bowtie2/align/main.nf b/tests/modules/bowtie2/align/main.nf index 20602f30..8c8e3ab8 100644 --- 
a/tests/modules/bowtie2/align/main.nf +++ b/tests/modules/bowtie2/align/main.nf @@ -2,13 +2,16 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [:] ) -include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' addParams( options: [:] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' +include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' workflow test_bowtie2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE2_BUILD ( fasta ) @@ -16,11 +19,15 @@ workflow test_bowtie2_align_single_end { } workflow test_bowtie2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + BOWTIE2_BUILD ( fasta ) BOWTIE2_ALIGN ( input, BOWTIE2_BUILD.out.index ) } diff --git a/tests/modules/bowtie2/align/nextflow.config b/tests/modules/bowtie2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie2/align/test.yml b/tests/modules/bowtie2/align/test.yml index 05952b76..95d48b88 100644 --- a/tests/modules/bowtie2/align/test.yml +++ b/tests/modules/bowtie2/align/test.yml @@ -1,41 +1,41 @@ - name: bowtie2 align single-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 - name: bowtie2 align paired-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: 
./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 diff --git a/tests/modules/bowtie2/build_test/main.nf b/tests/modules/bowtie2/build_test/main.nf index 2b41fab2..f1d35083 100644 --- a/tests/modules/bowtie2/build_test/main.nf +++ b/tests/modules/bowtie2/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [publish_dir:'bowtie2'] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' workflow test_bowtie2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie2/build_test/nextflow.config b/tests/modules/bowtie2/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie2/build_test/test.yml b/tests/modules/bowtie2/build_test/test.yml index 3fd049b9..88e6c3ad 100644 --- a/tests/modules/bowtie2/build_test/test.yml +++ 
b/tests/modules/bowtie2/build_test/test.yml @@ -1,5 +1,5 @@ - name: bowtie2 build - command: nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/build/nextflow.config tags: - bowtie2 - bowtie2/build diff --git a/tests/modules/bwa/aln/main.nf b/tests/modules/bwa/aln/main.nf index feb7473d..909e7a2d 100644 --- a/tests/modules/bwa/aln/main.nf +++ b/tests/modules/bwa/aln/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' // // Test with single-end data // workflow test_bwa_aln_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwa_aln_single_end { // Test with paired-end data // workflow test_bwa_aln_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) diff --git a/tests/modules/bwa/aln/nextflow.config b/tests/modules/bwa/aln/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/aln/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/aln/test.yml b/tests/modules/bwa/aln/test.yml index 08848143..c89c47be 100644 --- a/tests/modules/bwa/aln/test.yml +++ b/tests/modules/bwa/aln/test.yml @@ -1,24 +1,24 @@ - name: bwa aln single-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln files: - path: ./output/bwa/test.sai md5sum: aaaf39b6814c96ca1a5eacc662adf926 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa aln paired-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c ./tests/config/nextflow.config -c 
./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln @@ -27,13 +27,13 @@ md5sum: aaaf39b6814c96ca1a5eacc662adf926 - path: ./output/bwa/test.2.sai md5sum: b4f185d9b4cb256dd5c377070a536124 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/index/main.nf b/tests/modules/bwa/index/main.nf index 30d31202..fe040cb2 100644 --- a/tests/modules/bwa/index/main.nf +++ b/tests/modules/bwa/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [publish_dir:'bwa'] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' workflow test_bwa_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwa/index/nextflow.config b/tests/modules/bwa/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index 3fe8663d..a9dab91d 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -1,5 +1,5 @@ - name: bwa index test_bwa_index - command: nextflow run tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config + 
command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c ./tests/config/nextflow.config -c ./tests/modules/bwa/index/nextflow.config tags: - bwa - bwa/index diff --git a/tests/modules/bwa/mem/main.nf b/tests/modules/bwa/mem/main.nf index bac51d23..117cbb4d 100644 --- a/tests/modules/bwa/mem/main.nf +++ b/tests/modules/bwa/mem/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' // // Test with single-end data // workflow test_bwa_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwa_mem_single_end { // Test with paired-end data // workflow test_bwa_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) diff --git a/tests/modules/bwa/mem/nextflow.config 
b/tests/modules/bwa/mem/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/mem/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/mem/test.yml b/tests/modules/bwa/mem/test.yml index df1988b5..93535043 100644 --- a/tests/modules/bwa/mem/test.yml +++ b/tests/modules/bwa/mem/test.yml @@ -1,35 +1,35 @@ - name: bwa mem single-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa mem paired-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 
3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/sampe/main.nf b/tests/modules/bwa/sampe/main.nf index 017f27e5..abd25566 100644 --- a/tests/modules/bwa/sampe/main.nf +++ b/tests/modules/bwa/sampe/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' workflow test_bwa_sampe { diff --git a/tests/modules/bwa/sampe/nextflow.config b/tests/modules/bwa/sampe/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/sampe/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml index ba5e704d..fb6d7708 100644 --- a/tests/modules/bwa/sampe/test.yml +++ b/tests/modules/bwa/sampe/test.yml @@ -1,5 +1,5 @@ - name: bwa sampe - command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c ./tests/config/nextflow.config -c ./tests/modules/bwa/sampe/nextflow.config tags: - bwa - bwa/sampe diff --git 
a/tests/modules/bwa/samse/main.nf b/tests/modules/bwa/samse/main.nf index 87a7c7b1..17912c36 100644 --- a/tests/modules/bwa/samse/main.nf +++ b/tests/modules/bwa/samse/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' workflow test_bwa_samse { diff --git a/tests/modules/bwa/samse/nextflow.config b/tests/modules/bwa/samse/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/samse/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml index 597844d4..5a2fe1e3 100644 --- a/tests/modules/bwa/samse/test.yml +++ b/tests/modules/bwa/samse/test.yml @@ -1,5 +1,5 @@ - name: bwa samse - command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c ./tests/config/nextflow.config -c ./tests/modules/bwa/samse/nextflow.config tags: - bwa - bwa/samse diff --git a/tests/modules/bwamem2/index/main.nf b/tests/modules/bwamem2/index/main.nf index bb7d0803..fe88f8f7 100644 --- a/tests/modules/bwamem2/index/main.nf +++ b/tests/modules/bwamem2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [publish_dir:'bwamem2'] ) +include { BWAMEM2_INDEX } from 
'../../../../modules/bwamem2/index/main.nf' workflow test_bwamem2_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwamem2/index/nextflow.config b/tests/modules/bwamem2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwamem2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwamem2/index/test.yml b/tests/modules/bwamem2/index/test.yml index d9d15c53..efada6ec 100644 --- a/tests/modules/bwamem2/index/test.yml +++ b/tests/modules/bwamem2/index/test.yml @@ -1,5 +1,5 @@ - name: bwamem2 index - command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/index/nextflow.config tags: - bwamem2 - bwamem2/index diff --git a/tests/modules/bwamem2/mem/main.nf b/tests/modules/bwamem2/mem/main.nf index 5abda8bb..2ab557e6 100644 --- a/tests/modules/bwamem2/mem/main.nf +++ b/tests/modules/bwamem2/mem/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [:] ) -include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' addParams( options: [:] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' // // Test with single-end data // workflow test_bwamem2_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwamem2_mem_single_end { // Test with paired-end data // workflow test_bwamem2_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) diff --git a/tests/modules/bwamem2/mem/nextflow.config b/tests/modules/bwamem2/mem/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwamem2/mem/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwamem2/mem/test.yml b/tests/modules/bwamem2/mem/test.yml index cc2fe2a8..c1724bc0 100644 --- a/tests/modules/bwamem2/mem/test.yml +++ b/tests/modules/bwamem2/mem/test.yml @@ -1,35 +1,35 @@ - name: bwamem2 mem single-end - command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb 
md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - name: bwamem2 mem paired-end - command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 diff --git a/tests/modules/bwameth/align/main.nf b/tests/modules/bwameth/align/main.nf index fb8cad6a..8066941c 100644 --- a/tests/modules/bwameth/align/main.nf +++ b/tests/modules/bwameth/align/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 
-include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) -include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' addParams( options: [:] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' +include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' // // Test with single-end data // workflow test_bwameth_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwameth_align_single_end { // Test with paired-end data // workflow test_bwameth_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) diff --git a/tests/modules/bwameth/align/nextflow.config b/tests/modules/bwameth/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/align/test.yml b/tests/modules/bwameth/align/test.yml index 5cf4b84d..f921b5f4 100644 --- a/tests/modules/bwameth/align/test.yml +++ b/tests/modules/bwameth/align/test.yml @@ -1,5 +1,5 @@ - name: bwameth align single-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align @@ -7,7 +7,7 @@ - path: output/bwameth/test.bam - name: bwameth align paired-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align diff --git a/tests/modules/bwameth/index/main.nf b/tests/modules/bwameth/index/main.nf index 46662201..b70fd1f7 100644 --- a/tests/modules/bwameth/index/main.nf +++ b/tests/modules/bwameth/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [publish_dir:'bwameth'] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' workflow test_bwameth_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwameth/index/nextflow.config b/tests/modules/bwameth/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/index/test.yml b/tests/modules/bwameth/index/test.yml index 0cc7922e..9783c511 100644 --- a/tests/modules/bwameth/index/test.yml +++ b/tests/modules/bwameth/index/test.yml @@ -1,5 +1,5 @@ - name: bwameth index test workflow - command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/index/nextflow.config tags: - bwameth - bwameth/index diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf index a110a8ab..430c71fa 100644 --- a/tests/modules/cat/cat/main.nf +++ b/tests/modules/cat/cat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' addParams( options: [:] ) +include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' workflow test_cat_unzipped_unzipped { diff --git a/tests/modules/cat/cat/nextflow.config b/tests/modules/cat/cat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/cat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index 2f234a01..d7973042 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -1,5 +1,5 @@ - name: cat unzipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -8,7 +8,7 @@ md5sum: f44b33a0e441ad58b2d3700270e2dbe2 - name: cat zipped 
zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -16,7 +16,7 @@ - path: output/cat/cat.txt.gz - name: cat zipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -25,7 +25,7 @@ md5sum: c439d3b60e7bc03e8802a451a0d9a5d9 - name: cat unzipped zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index 027bd108..c3da91d2 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' addParams( options: [publish_dir:'cat'] ) +include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' workflow test_cat_fastq_single_end { input = [ diff --git a/tests/modules/cat/fastq/nextflow.config b/tests/modules/cat/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 9a5af25c..89ddf331 100644 --- a/tests/modules/cat/fastq/test.yml +++ 
b/tests/modules/cat/fastq/test.yml @@ -1,5 +1,5 @@ - name: cat fastq single-end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq @@ -8,7 +8,7 @@ md5sum: 59f6dbe193741bb40f498f254aeb2e99 - name: cat fastq fastqc_paired_end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq diff --git a/tests/modules/cellranger/mkref/main.nf b/tests/modules/cellranger/mkref/main.nf index b20a68db..ad98ed1a 100644 --- a/tests/modules/cellranger/mkref/main.nf +++ b/tests/modules/cellranger/mkref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' addParams( options: [:] ) +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' workflow test_cellranger_mkref { diff --git a/tests/modules/cellranger/mkref/nextflow.config b/tests/modules/cellranger/mkref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cellranger/mkref/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkref/test.yml b/tests/modules/cellranger/mkref/test.yml index 5e60819e..eb01e9e2 100644 --- a/tests/modules/cellranger/mkref/test.yml +++ b/tests/modules/cellranger/mkref/test.yml @@ -1,5 +1,5 @@ - name: cellranger mkref test_cellranger_mkref - command: nextflow run tests/modules/cellranger/mkref -entry test_cellranger_mkref -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/cellranger/mkref -entry test_cellranger_mkref -c ./tests/config/nextflow.config -c ./tests/modules/cellranger/mkref/nextflow.config tags: - cellranger - cellranger/mkref diff --git a/tests/modules/checkm/lineagewf/main.nf b/tests/modules/checkm/lineagewf/main.nf index 94309896..e914774c 100644 --- a/tests/modules/checkm/lineagewf/main.nf +++ b/tests/modules/checkm/lineagewf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' addParams( options: [:] ) +include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' workflow test_checkm_lineagewf { diff --git a/tests/modules/checkm/lineagewf/nextflow.config b/tests/modules/checkm/lineagewf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/checkm/lineagewf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/checkm/lineagewf/test.yml b/tests/modules/checkm/lineagewf/test.yml index 768601b0..6749f6aa 100644 --- a/tests/modules/checkm/lineagewf/test.yml +++ b/tests/modules/checkm/lineagewf/test.yml @@ -1,5 +1,5 @@ - name: checkm lineagewf - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c ./tests/config/nextflow.config -c ./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf @@ -16,7 +16,7 @@ - "UID1" - name: checkm lineagewf_multi - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c ./tests/config/nextflow.config -c 
./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf diff --git a/tests/modules/chromap/chromap/main.nf b/tests/modules/chromap/chromap/main.nf index a5a1fc86..5522f2b5 100644 --- a/tests/modules/chromap/chromap/main.nf +++ b/tests/modules/chromap/chromap/main.nf @@ -2,19 +2,20 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' addParams( options: ['args': '--SAM'] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' workflow test_chromap_chromap_single_end { // Test single-end and gz compressed output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -31,8 +32,6 @@ workflow test_chromap_chromap_single_end { workflow test_chromap_chromap_paired_end { // Test paired-end and gz compressed output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -40,6 +39,7 @@ workflow test_chromap_chromap_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], 
checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -56,8 +56,6 @@ workflow test_chromap_chromap_paired_end { workflow test_chromap_chromap_paired_bam { // Test paired-end and bam output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -65,6 +63,7 @@ workflow test_chromap_chromap_paired_bam { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_SAM ( diff --git a/tests/modules/chromap/chromap/nextflow.config b/tests/modules/chromap/chromap/nextflow.config new file mode 100644 index 00000000..1e979bb9 --- /dev/null +++ b/tests/modules/chromap/chromap/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CHROMAP_CHROMAP_SAM { + ext.args = '--SAM' + } + +} diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index b2ce8137..20a51e2b 100644 --- a/tests/modules/chromap/chromap/test.yml +++ b/tests/modules/chromap/chromap/test.yml @@ -1,5 +1,5 @@ - name: chromap chromap test_chromap_chromap_single_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -10,7 +10,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: chromap chromap test_chromap_chromap_paired_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -21,7 +21,7 @@ md5sum: cafd8fb21977f5ae69e9008b220ab169 - name: chromap chromap test_chromap_chromap_paired_bam - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap diff --git a/tests/modules/chromap/index/main.nf b/tests/modules/chromap/index/main.nf index 997baba1..18b42006 100644 --- a/tests/modules/chromap/index/main.nf +++ b/tests/modules/chromap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' workflow test_chromap_index { diff --git a/tests/modules/chromap/index/nextflow.config b/tests/modules/chromap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/chromap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml index 0a99a3a0..74cfadfc 100644 --- a/tests/modules/chromap/index/test.yml +++ b/tests/modules/chromap/index/test.yml @@ -1,5 +1,5 @@ - name: chromap index test_chromap_index - command: nextflow run tests/modules/chromap/index -entry test_chromap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/index -entry test_chromap_index -c ./tests/config/nextflow.config -c 
./tests/modules/chromap/index/nextflow.config tags: - chromap/index - chromap diff --git a/tests/modules/clonalframeml/main.nf b/tests/modules/clonalframeml/main.nf index 35ecaa79..73773113 100644 --- a/tests/modules/clonalframeml/main.nf +++ b/tests/modules/clonalframeml/main.nf @@ -2,13 +2,15 @@ nextflow.enable.dsl = 2 -include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' addParams( options: [:] ) +include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' workflow test_clonalframeml { - - input = [ [ id:'test' ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.newick", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/genome_msa.fa.gz", checkIfExists: true),] + + input = [ + [ id:'test' ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_nwk'], checkIfExists: true), + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_gz'], checkIfExists: true) + ] CLONALFRAMEML ( input ) } diff --git a/tests/modules/clonalframeml/nextflow.config b/tests/modules/clonalframeml/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/clonalframeml/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/clonalframeml/test.yml b/tests/modules/clonalframeml/test.yml index f2b68115..8ea11d16 100644 --- a/tests/modules/clonalframeml/test.yml +++ b/tests/modules/clonalframeml/test.yml @@ -1,5 +1,5 @@ - name: clonalframeml test_clonalframeml - command: nextflow run tests/modules/clonalframeml -entry test_clonalframeml -c tests/config/nextflow.config + command: nextflow run ./tests/modules/clonalframeml -entry test_clonalframeml -c ./tests/config/nextflow.config -c 
./tests/modules/clonalframeml/nextflow.config tags: - clonalframeml files: diff --git a/tests/modules/cmseq/polymut/main.nf b/tests/modules/cmseq/polymut/main.nf index 729ed38f..df6a0ac1 100644 --- a/tests/modules/cmseq/polymut/main.nf +++ b/tests/modules/cmseq/polymut/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' addParams( options: [:] ) +include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' workflow test_cmseq_polymut_1 { diff --git a/tests/modules/cmseq/polymut/nextflow.config b/tests/modules/cmseq/polymut/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cmseq/polymut/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cmseq/polymut/test.yml b/tests/modules/cmseq/polymut/test.yml index 2a989cb9..05887fa8 100644 --- a/tests/modules/cmseq/polymut/test.yml +++ b/tests/modules/cmseq/polymut/test.yml @@ -1,5 +1,5 @@ - name: cmseq polymut test_cmseq_polymut_1 - command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq @@ -8,7 +8,7 @@ md5sum: fd325c1724ee23d132a9115c64494efc - name: cmseq polymut test_cmseq_polymut_2 - command: nextflow run tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq @@ -17,7 +17,7 @@ md5sum: fd325c1724ee23d132a9115c64494efc - name: cmseq polymut test_cmseq_polymut_3 - command: nextflow run 
tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config tags: - cmseq/polymut - cmseq diff --git a/tests/modules/cnvkit/batch/main.nf b/tests/modules/cnvkit/batch/main.nf index 5d92afaa..6b40dec6 100755 --- a/tests/modules/cnvkit/batch/main.nf +++ b/tests/modules/cnvkit/batch/main.nf @@ -2,61 +2,54 @@ nextflow.enable.dsl = 2 -include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn' ] ) -include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--output-reference reference.cnn --method wgs' ] ) -include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' addParams( options: [ 'args': '--method wgs' ] ) - +include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' workflow test_cnvkit_hybrid { - tumor = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test' ], // meta map - tumor, - normal - ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: 
true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) CNVKIT_HYBRID ( input, fasta, targets, [] ) } workflow test_cnvkit_wgs { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test'], // meta map - tumor, - normal - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) CNVKIT_WGS ( input, fasta, [], [] ) } - workflow test_cnvkit_cram { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - normal = file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ id:'test'], // meta map - tumor, - normal - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) CNVKIT_WGS ( input, fasta, [], [] ) } - - workflow test_cnvkit_tumoronly { - tumor = file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) - input = [ [ 
id:'test'], // meta map - tumor, - [ ] - ] + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + [] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) reference = file(params.test_data['generic']['cnn']['reference'], checkIfExists: true) diff --git a/tests/modules/cnvkit/batch/nextflow.config b/tests/modules/cnvkit/batch/nextflow.config new file mode 100644 index 00000000..b8a8fc3f --- /dev/null +++ b/tests/modules/cnvkit/batch/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CNVKIT_HYBRID { + ext.args = '--output-reference reference.cnn' + } + + withName: CNVKIT_WGS { + ext.args = '--output-reference reference.cnn --method wgs' + } + + withName: CNVKIT_TUMORONLY { + ext.args = '--method wgs' + } + +} diff --git a/tests/modules/cnvkit/batch/test.yml b/tests/modules/cnvkit/batch/test.yml index 96ea670c..57af3603 100755 --- a/tests/modules/cnvkit/batch/test.yml +++ b/tests/modules/cnvkit/batch/test.yml @@ -1,5 +1,5 @@ - name: cnvkit batch test_cnvkit_hybrid - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -28,7 +28,7 @@ md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 - name: cnvkit batch test_cnvkit_wgs - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -59,7 +59,7 @@ md5sum: 6ae6b3fce7299eedca6133d911c38fe1 - name: cnvkit batch 
test_cnvkit_cram - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_cram -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit @@ -90,7 +90,7 @@ md5sum: 6ae6b3fce7299eedca6133d911c38fe1 - name: cnvkit batch test_cnvkit_tumoronly - command: nextflow run tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config tags: - cnvkit/batch - cnvkit diff --git a/tests/modules/cooler/cload/main.nf b/tests/modules/cooler/cload/main.nf index dd9b3e98..170b7e11 100644 --- a/tests/modules/cooler/cload/main.nf +++ b/tests/modules/cooler/cload/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairix'] ) -include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N'] ) -include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' addParams( options: [args:'tabix'] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) -include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) -include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' +include { 
COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' workflow test_cooler_cload_pairix { diff --git a/tests/modules/cooler/cload/nextflow.config b/tests/modules/cooler/cload/nextflow.config new file mode 100644 index 00000000..610a5425 --- /dev/null +++ b/tests/modules/cooler/cload/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_CLOAD { + ext.args = 'pairix' + } + + withName: COOLER_CLOAD_PAIRS { + ext.args = 'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N' + } + + withName: COOLER_CLOAD_TABIX { + ext.args = 'tabix' + } + +} diff --git a/tests/modules/cooler/cload/test.yml b/tests/modules/cooler/cload/test.yml index 7cb9a0bd..f99f4624 100644 --- a/tests/modules/cooler/cload/test.yml +++ b/tests/modules/cooler/cload/test.yml @@ -1,5 +1,5 @@ - name: cooler cload test_cooler_cload_pairix - command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairix -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler @@ -9,7 +9,7 @@ md5sum: 0cd85311089669688ec17468eae02111 - name: cooler cload test_cooler_cload_pairs - command: nextflow run tests/modules/cooler/cload -entry test_cooler_cload_pairs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairs -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler @@ -19,7 +19,7 @@ md5sum: 7f832733fc7853ebb1937b33e4c1e0de - name: cooler cload test_cooler_cload_tabix - command: nextflow run tests/modules/cooler/cload -entry 
test_cooler_cload_tabix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_tabix -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config tags: - cooler/cload - cooler diff --git a/tests/modules/cooler/digest/main.nf b/tests/modules/cooler/digest/main.nf index 817c9081..4dfa25be 100644 --- a/tests/modules/cooler/digest/main.nf +++ b/tests/modules/cooler/digest/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' addParams( options: [:] ) +include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' workflow test_cooler_digest { diff --git a/tests/modules/cooler/digest/nextflow.config b/tests/modules/cooler/digest/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/digest/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/digest/test.yml b/tests/modules/cooler/digest/test.yml index b594a232..80430ed7 100644 --- a/tests/modules/cooler/digest/test.yml +++ b/tests/modules/cooler/digest/test.yml @@ -1,5 +1,5 @@ - name: cooler digest test_cooler_digest - command: nextflow run tests/modules/cooler/digest -entry test_cooler_digest -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/digest -entry test_cooler_digest -c ./tests/config/nextflow.config -c ./tests/modules/cooler/digest/nextflow.config tags: - cooler/digest - cooler diff --git a/tests/modules/cooler/dump/main.nf b/tests/modules/cooler/dump/main.nf index deeeb21f..d80ee0d7 100644 --- a/tests/modules/cooler/dump/main.nf +++ b/tests/modules/cooler/dump/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP } from 
'../../../../modules/cooler/dump/main.nf' workflow test_cooler_dump { diff --git a/tests/modules/cooler/dump/nextflow.config b/tests/modules/cooler/dump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/dump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/dump/test.yml b/tests/modules/cooler/dump/test.yml index ccfc5f47..6f81c7a9 100644 --- a/tests/modules/cooler/dump/test.yml +++ b/tests/modules/cooler/dump/test.yml @@ -1,5 +1,5 @@ - name: cooler dump test_cooler_dump - command: nextflow run tests/modules/cooler/dump -entry test_cooler_dump -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/dump -entry test_cooler_dump -c ./tests/config/nextflow.config -c ./tests/modules/cooler/dump/nextflow.config tags: - cooler/dump - cooler diff --git a/tests/modules/cooler/merge/main.nf b/tests/modules/cooler/merge/main.nf index 564660c5..81336984 100644 --- a/tests/modules/cooler/merge/main.nf +++ b/tests/modules/cooler/merge/main.nf @@ -2,15 +2,19 @@ nextflow.enable.dsl = 2 -include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' addParams( options: [publish_files:[:]] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' workflow test_cooler_merge { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), - file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true)] - ] + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), + 
file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true) + ] + ] - COOLER_MERGE ( input ).cool | COOLER_DUMP + COOLER_MERGE ( input ) + COOLER_DUMP ( COOLER_MERGE.out.cool, "" ) } diff --git a/tests/modules/cooler/merge/nextflow.config b/tests/modules/cooler/merge/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/merge/test.yml b/tests/modules/cooler/merge/test.yml index 3ac388e7..c884ba5e 100644 --- a/tests/modules/cooler/merge/test.yml +++ b/tests/modules/cooler/merge/test.yml @@ -1,5 +1,5 @@ - name: cooler merge test_cooler_merge - command: nextflow run tests/modules/cooler/merge -entry test_cooler_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/merge -entry test_cooler_merge -c ./tests/config/nextflow.config -c ./tests/modules/cooler/merge/nextflow.config tags: - cooler/merge - cooler diff --git a/tests/modules/cooler/zoomify/main.nf b/tests/modules/cooler/zoomify/main.nf index 72c33983..42edadb8 100644 --- a/tests/modules/cooler/zoomify/main.nf +++ b/tests/modules/cooler/zoomify/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' addParams( options: ['args':'-r 2,4,8', publish_files:[:]] ) -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' workflow test_cooler_zoomify { - input = [ [ id:'test' ], // meta map - file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true)] + + input = [ + [ id:'test' ], // meta map + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true) + ] COOLER_ZOOMIFY ( input ) - 
COOLER_DUMP(COOLER_ZOOMIFY.out.mcool, "/resolutions/2") } diff --git a/tests/modules/cooler/zoomify/nextflow.config b/tests/modules/cooler/zoomify/nextflow.config new file mode 100644 index 00000000..d4c3503f --- /dev/null +++ b/tests/modules/cooler/zoomify/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_ZOOMIFY { + ext.args = '-r 2,4,8' + } + +} diff --git a/tests/modules/cooler/zoomify/test.yml b/tests/modules/cooler/zoomify/test.yml index 79a5af2c..3afdb8a6 100644 --- a/tests/modules/cooler/zoomify/test.yml +++ b/tests/modules/cooler/zoomify/test.yml @@ -1,8 +1,8 @@ - name: cooler zoomify test_cooler_zoomify - command: nextflow run tests/modules/cooler/zoomify -entry test_cooler_zoomify -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/zoomify -entry test_cooler_zoomify -c ./tests/config/nextflow.config -c ./tests/modules/cooler/zoomify/nextflow.config tags: - cooler - cooler/zoomify files: - path: output/cooler/test.bedpe - md5sum: 8d792beb609fff62b536c326661f9507 + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 diff --git a/tests/modules/csvtk/concat/main.nf b/tests/modules/csvtk/concat/main.nf index 22b0205f..aee31679 100644 --- a/tests/modules/csvtk/concat/main.nf +++ b/tests/modules/csvtk/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' addParams( options: [:] ) +include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' workflow test_csvtk_concat { diff --git a/tests/modules/csvtk/concat/nextflow.config b/tests/modules/csvtk/concat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/csvtk/concat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/csvtk/concat/test.yml b/tests/modules/csvtk/concat/test.yml index 0fe9c604..11a2af67 100644 --- a/tests/modules/csvtk/concat/test.yml +++ b/tests/modules/csvtk/concat/test.yml @@ -1,5 +1,5 @@ - name: csvtk concat - command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/concat/nextflow.config tags: - csvtk - csvtk/concat diff --git a/tests/modules/csvtk/split/main.nf b/tests/modules/csvtk/split/main.nf index 8dfd4053..31d24d61 100644 --- a/tests/modules/csvtk/split/main.nf +++ b/tests/modules/csvtk/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' addParams( options: [args: "-C '&' --fields 'first_name' "]) +include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' workflow test_csvtk_split_tsv { diff --git a/tests/modules/csvtk/split/nextflow.config b/tests/modules/csvtk/split/nextflow.config new file mode 100644 index 00000000..1dbd7615 --- /dev/null +++ b/tests/modules/csvtk/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CSVTK_SPLIT { + ext.args = "-C \'&\' --fields \'first_name\' " + } + +} diff --git a/tests/modules/csvtk/split/test.yml b/tests/modules/csvtk/split/test.yml index ade2fe48..bd13cca6 100644 --- a/tests/modules/csvtk/split/test.yml +++ b/tests/modules/csvtk/split/test.yml @@ -1,5 +1,5 @@ - name: csvtk split test_csvtk_split_tsv - command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_tsv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_tsv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config tags: - csvtk/split - csvtk @@ -12,7 +12,7 @@ 
md5sum: 45ae6da8111096746d1736d34220a3ec - name: csvtk split test_csvtk_split_csv - command: nextflow run tests/modules/csvtk/split -entry test_csvtk_split_csv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_csv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config tags: - csvtk/split - csvtk diff --git a/tests/modules/custom/dumpsoftwareversions/main.nf b/tests/modules/custom/dumpsoftwareversions/main.nf index 020b19bd..95a43a82 100644 --- a/tests/modules/custom/dumpsoftwareversions/main.nf +++ b/tests/modules/custom/dumpsoftwareversions/main.nf @@ -2,23 +2,54 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from '../../../../modules/multiqc/main.nf' addParams( options: [:] ) -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' addParams( options: [publish_dir:'custom'] ) +include { FASTQC } from '../../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../../modules/multiqc/main.nf' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' + +workflow fastqc1 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions +} + +workflow fastqc2 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions + zip = FASTQC.out.zip +} workflow test_custom_dumpsoftwareversions { input = [ [ id: 'test', single_end: false ], - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] - FASTQC ( input ) - MULTIQC ( 
FASTQC.out.zip.collect { it[1] } ) + // Using subworkflows to ensure that the script can properly handle + // cases where subworkflows have a module with the same name. + fastqc1 ( input ) + fastqc2 ( input ) + MULTIQC ( fastqc2.out.zip.collect { it[1] } ) - ch_software_versions = Channel.empty() - ch_software_versions = ch_software_versions.mix(FASTQC.out.versions) - ch_software_versions = ch_software_versions.mix(MULTIQC.out.versions) + fastqc1 + .out + .versions + .mix(fastqc2.out.versions) + .mix(MULTIQC.out.versions) + .set { ch_software_versions } CUSTOM_DUMPSOFTWAREVERSIONS ( ch_software_versions.collectFile() ) } diff --git a/tests/modules/custom/dumpsoftwareversions/nextflow.config b/tests/modules/custom/dumpsoftwareversions/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/dumpsoftwareversions/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/dumpsoftwareversions/test.yml b/tests/modules/custom/dumpsoftwareversions/test.yml index 1815c0ba..363a1218 100644 --- a/tests/modules/custom/dumpsoftwareversions/test.yml +++ b/tests/modules/custom/dumpsoftwareversions/test.yml @@ -1,8 +1,14 @@ - name: custom dumpsoftwareversions - command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c tests/config/nextflow.config + command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c ./tests/config/nextflow.config -c ./tests/modules/custom/dumpsoftwareversions/nextflow.config tags: - custom - custom/dumpsoftwareversions files: - path: output/custom/software_versions.yml + contains: + - FASTQC + - MULTIQC + must_not_contain: + - fastqc1 + - fastqc2 - path: output/custom/software_versions_mqc.yml diff --git a/tests/modules/custom/getchromsizes/main.nf 
b/tests/modules/custom/getchromsizes/main.nf index 503668ec..b4f9fb9f 100644 --- a/tests/modules/custom/getchromsizes/main.nf +++ b/tests/modules/custom/getchromsizes/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' addParams( options: [:] ) +include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' workflow test_custom_getchromsizes { diff --git a/tests/modules/custom/getchromsizes/nextflow.config b/tests/modules/custom/getchromsizes/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/getchromsizes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/getchromsizes/test.yml b/tests/modules/custom/getchromsizes/test.yml index 1265f478..9a770ad4 100644 --- a/tests/modules/custom/getchromsizes/test.yml +++ b/tests/modules/custom/getchromsizes/test.yml @@ -1,5 +1,5 @@ - name: custom getchromsizes - command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c tests/config/nextflow.config + command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c ./tests/config/nextflow.config -c ./tests/modules/custom/getchromsizes/nextflow.config tags: - custom - custom/getchromsizes diff --git a/tests/modules/cutadapt/main.nf b/tests/modules/cutadapt/main.nf index 8e060398..a47feebb 100644 --- a/tests/modules/cutadapt/main.nf +++ b/tests/modules/cutadapt/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CUTADAPT } from '../../../modules/cutadapt/main.nf' addParams( options: [ args:'-q 25' ] ) +include { CUTADAPT } from '../../../modules/cutadapt/main.nf' // // Test with single-end data diff --git a/tests/modules/cutadapt/nextflow.config b/tests/modules/cutadapt/nextflow.config new file mode 100644 index 
00000000..2af532cc --- /dev/null +++ b/tests/modules/cutadapt/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CUTADAPT { + ext.args = '-q 25' + } + +} diff --git a/tests/modules/cutadapt/test.yml b/tests/modules/cutadapt/test.yml index 40710dc5..6fa0eb4f 100644 --- a/tests/modules/cutadapt/test.yml +++ b/tests/modules/cutadapt/test.yml @@ -1,5 +1,5 @@ - name: cutadapt single-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: @@ -7,7 +7,7 @@ - path: ./output/cutadapt/test.trim.fastq.gz - name: cutadapt paired-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: diff --git a/tests/modules/damageprofiler/main.nf b/tests/modules/damageprofiler/main.nf index 36ae7b24..9207caf1 100644 --- a/tests/modules/damageprofiler/main.nf +++ b/tests/modules/damageprofiler/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' addParams( options: [:] ) +include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' workflow test_damageprofiler { diff --git a/tests/modules/damageprofiler/nextflow.config b/tests/modules/damageprofiler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/damageprofiler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/damageprofiler/test.yml b/tests/modules/damageprofiler/test.yml index 9ef964dc..4a560ce1 100644 --- a/tests/modules/damageprofiler/test.yml +++ b/tests/modules/damageprofiler/test.yml @@ -1,5 +1,5 @@ - name: damageprofiler - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -36,7 +36,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_reference - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -73,7 +73,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_specieslist - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf index 31c32ef4..f6f6becf 100644 --- a/tests/modules/dastool/dastool/main.nf +++ b/tests/modules/dastool/dastool/main.nf @@ -1,10 +1,10 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS 
} from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) -include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' addParams( options: [args: '--score_threshold 0 --debug'] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' +include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' workflow test_dastool_dastool { diff --git a/tests/modules/dastool/dastool/nextflow.config b/tests/modules/dastool/dastool/nextflow.config new file mode 100644 index 00000000..e306b4b4 --- /dev/null +++ b/tests/modules/dastool/dastool/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + + withName: DASTOOL_DASTOOL { + ext.args = '--score_threshold 0 --debug' + } + +} diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml index eff02f96..e2161890 100644 --- a/tests/modules/dastool/dastool/test.yml +++ b/tests/modules/dastool/dastool/test.yml @@ -1,5 +1,5 @@ - name: dastool dastool test_dastool_dastool - command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dastool/dastool -entry test_dastool_dastool -c ./tests/config/nextflow.config -c ./tests/modules/dastool/dastool/nextflow.config tags: - dastool - dastool/dastool diff --git a/tests/modules/dastool/scaffolds2bin/main.nf 
b/tests/modules/dastool/scaffolds2bin/main.nf index 63ffe82a..a0cd6726 100644 --- a/tests/modules/dastool/scaffolds2bin/main.nf +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' addParams( options: [:] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' workflow test_dastool_scaffolds2bin { diff --git a/tests/modules/dastool/scaffolds2bin/nextflow.config b/tests/modules/dastool/scaffolds2bin/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml index c6e25bff..26f528c9 100644 --- a/tests/modules/dastool/scaffolds2bin/test.yml +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -1,5 +1,5 @@ - name: dastool scaffolds2bin test_dastool_scaffolds2bin - command: nextflow run tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c ./tests/config/nextflow.config -c ./tests/modules/dastool/scaffolds2bin/nextflow.config tags: - dastool - dastool/scaffolds2bin diff --git a/tests/modules/dedup/main.nf b/tests/modules/dedup/main.nf index 37e8e5c2..4a397eaa 100644 --- a/tests/modules/dedup/main.nf +++ b/tests/modules/dedup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEDUP } from '../../../modules/dedup/main.nf' addParams( options: [args: "-m"] ) +include { DEDUP } from '../../../modules/dedup/main.nf' workflow test_dedup { diff --git a/tests/modules/dedup/nextflow.config b/tests/modules/dedup/nextflow.config new file mode 100644 index 00000000..80a42463 --- /dev/null +++ b/tests/modules/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DEDUP { + ext.args = '-m' + } + +} diff --git a/tests/modules/dedup/test.yml b/tests/modules/dedup/test.yml index b35cfafd..077aac0d 100644 --- a/tests/modules/dedup/test.yml +++ b/tests/modules/dedup/test.yml @@ -1,5 +1,5 @@ - name: dedup test_dedup - command: nextflow run tests/modules/dedup -entry test_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dedup -entry test_dedup -c ./tests/config/nextflow.config -c ./tests/modules/dedup/nextflow.config tags: - dedup files: diff --git a/tests/modules/deeptools/computematrix/main.nf b/tests/modules/deeptools/computematrix/main.nf index 116bc851..35e49f59 100644 --- a/tests/modules/deeptools/computematrix/main.nf +++ b/tests/modules/deeptools/computematrix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' addParams( options: ['args' : 'scale-regions -b 1000'] ) +include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' workflow test_deeptools_computematrix { 
diff --git a/tests/modules/deeptools/computematrix/nextflow.config b/tests/modules/deeptools/computematrix/nextflow.config new file mode 100644 index 00000000..285b2165 --- /dev/null +++ b/tests/modules/deeptools/computematrix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DEEPTOOLS_COMPUTEMATRIX { + ext.args = 'scale-regions -b 1000' + } + +} diff --git a/tests/modules/deeptools/computematrix/test.yml b/tests/modules/deeptools/computematrix/test.yml index fb2fa9e1..88657de3 100644 --- a/tests/modules/deeptools/computematrix/test.yml +++ b/tests/modules/deeptools/computematrix/test.yml @@ -1,5 +1,5 @@ - name: deeptools computematrix - command: nextflow run tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/computematrix/nextflow.config tags: - deeptools - deeptools/computematrix diff --git a/tests/modules/deeptools/plotfingerprint/main.nf b/tests/modules/deeptools/plotfingerprint/main.nf index e84adc39..bcef970e 100644 --- a/tests/modules/deeptools/plotfingerprint/main.nf +++ b/tests/modules/deeptools/plotfingerprint/main.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl = 2 params.fragment_size = 1000 -include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' workflow test_deeptools_plotfingerprint { diff --git a/tests/modules/deeptools/plotfingerprint/nextflow.config b/tests/modules/deeptools/plotfingerprint/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotfingerprint/nextflow.config @@ -0,0 +1,5 @@ +process { + + 
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotfingerprint/test.yml b/tests/modules/deeptools/plotfingerprint/test.yml index b7803a6e..11d4ae7b 100644 --- a/tests/modules/deeptools/plotfingerprint/test.yml +++ b/tests/modules/deeptools/plotfingerprint/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotfingerprint - command: nextflow run tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotfingerprint/nextflow.config tags: - deeptools - deeptools/plotfingerprint diff --git a/tests/modules/deeptools/plotheatmap/main.nf b/tests/modules/deeptools/plotheatmap/main.nf index 93e7d373..86005b2c 100644 --- a/tests/modules/deeptools/plotheatmap/main.nf +++ b/tests/modules/deeptools/plotheatmap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' workflow test_deeptools_plotheatmap { diff --git a/tests/modules/deeptools/plotheatmap/nextflow.config b/tests/modules/deeptools/plotheatmap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotheatmap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotheatmap/test.yml b/tests/modules/deeptools/plotheatmap/test.yml index 641d5121..9273f840 100644 --- a/tests/modules/deeptools/plotheatmap/test.yml +++ b/tests/modules/deeptools/plotheatmap/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotheatmap - command: nextflow run 
tests/modules/deeptools/plotheatmap -entry test_deeptools_plotheatmap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotheatmap -entry test_deeptools_plotheatmap -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotheatmap/nextflow.config tags: - deeptools - deeptools/plotheatmap diff --git a/tests/modules/deeptools/plotprofile/main.nf b/tests/modules/deeptools/plotprofile/main.nf index ac91f0c5..63ee47cd 100644 --- a/tests/modules/deeptools/plotprofile/main.nf +++ b/tests/modules/deeptools/plotprofile/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' workflow test_deeptools_plotprofile { diff --git a/tests/modules/deeptools/plotprofile/nextflow.config b/tests/modules/deeptools/plotprofile/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotprofile/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotprofile/test.yml b/tests/modules/deeptools/plotprofile/test.yml index efe02ce5..4b6c5b9a 100644 --- a/tests/modules/deeptools/plotprofile/test.yml +++ b/tests/modules/deeptools/plotprofile/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotprofile - command: nextflow run tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotprofile/nextflow.config tags: - deeptools - deeptools/plotprofile diff --git a/tests/modules/delly/call/main.nf b/tests/modules/delly/call/main.nf index f41dda95..f4583e05 100644 --- 
a/tests/modules/delly/call/main.nf +++ b/tests/modules/delly/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' addParams( options: [:] ) +include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' workflow test_delly_call { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/delly/call/nextflow.config b/tests/modules/delly/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/delly/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/delly/call/test.yml b/tests/modules/delly/call/test.yml index d8750892..a770d213 100644 --- a/tests/modules/delly/call/test.yml +++ b/tests/modules/delly/call/test.yml @@ -1,10 +1,9 @@ - name: delly call test_delly_call - command: nextflow run tests/modules/delly/call -entry test_delly_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/delly/call -entry test_delly_call -c ./tests/config/nextflow.config -c ./tests/modules/delly/call/nextflow.config tags: - delly - delly/call files: - path: output/delly/test.bcf - md5sum: 360c1bf6867f33bd2a868ddfb4d957fc - path: output/delly/test.bcf.csi md5sum: 19e0cdf06c415f4942f6d4dbd5fb7271 diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index ab131a86..87d05bf9 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' addParams( options: [ suffix: '.diamond_blastp' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTP } from 
'../../../../modules/diamond/blastp/main.nf' workflow test_diamond_blastp { diff --git a/tests/modules/diamond/blastp/nextflow.config b/tests/modules/diamond/blastp/nextflow.config new file mode 100644 index 00000000..d1222d49 --- /dev/null +++ b/tests/modules/diamond/blastp/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DIAMOND_BLASTP { + ext.suffix = '.diamond_blastp' + } + +} diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index ae62ea51..673563cb 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -1,5 +1,5 @@ - name: diamond blastp - command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastp/nextflow.config tags: - diamond - diamond/blastp diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index c0e437d7..77eb08ea 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' addParams( options: [ suffix: '.diamond_blastx' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' workflow test_diamond_blastx { diff --git a/tests/modules/diamond/blastx/nextflow.config b/tests/modules/diamond/blastx/nextflow.config new file mode 100644 index 00000000..83169455 --- /dev/null +++ b/tests/modules/diamond/blastx/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DIAMOND_BLASTX { + ext.suffix = '.diamond_blastx' + } + +} diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index 91a6eb4f..ee94802f 100644 --- a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -1,5 +1,5 @@ - name: diamond blastx - command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastx/nextflow.config tags: - diamond - diamond/blastx diff --git a/tests/modules/diamond/makedb/main.nf b/tests/modules/diamond/makedb/main.nf index bcd7691e..70982ae9 100644 --- a/tests/modules/diamond/makedb/main.nf +++ b/tests/modules/diamond/makedb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' workflow test_diamond_makedb { diff --git a/tests/modules/diamond/makedb/nextflow.config b/tests/modules/diamond/makedb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/diamond/makedb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/diamond/makedb/test.yml b/tests/modules/diamond/makedb/test.yml index 335b571f..c8f2d79e 100644 --- a/tests/modules/diamond/makedb/test.yml +++ b/tests/modules/diamond/makedb/test.yml @@ -1,5 +1,5 @@ - name: diamond makedb test_diamond_makedb - command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/makedb -entry 
test_diamond_makedb -c ./tests/config/nextflow.config -c ./tests/modules/diamond/makedb/nextflow.config tags: - diamond - diamond/makedb diff --git a/tests/modules/dragonflye/main.nf b/tests/modules/dragonflye/main.nf index 4d3ac6e5..3d59bb21 100644 --- a/tests/modules/dragonflye/main.nf +++ b/tests/modules/dragonflye/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler miniasm --gsize 5000000'] ) -include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler raven --gsize 5000000'] ) +include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' +include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' workflow test_dragonflye { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/dragonflye/nextflow.config b/tests/modules/dragonflye/nextflow.config new file mode 100644 index 00000000..fea43da4 --- /dev/null +++ b/tests/modules/dragonflye/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DRAGONFLYE { + ext.args = '--assembler miniasm --gsize 5000000' + } + + withName: DRAGONFLYE_RAVEN { + ext.args = '--assembler raven --gsize 5000000' + } + +} diff --git a/tests/modules/dragonflye/test.yml b/tests/modules/dragonflye/test.yml index fe6283c0..ef9121ba 100644 --- a/tests/modules/dragonflye/test.yml +++ b/tests/modules/dragonflye/test.yml @@ -1,5 +1,5 @@ - name: dragonflye with miniasm - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: @@ -12,7 +12,7 @@ - path: output/dragonflye/dragonflye.log - name: 
dragonflye with raven - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: diff --git a/tests/modules/dshbio/exportsegments/main.nf b/tests/modules/dshbio/exportsegments/main.nf index 6eef1046..c213dc54 100644 --- a/tests/modules/dshbio/exportsegments/main.nf +++ b/tests/modules/dshbio/exportsegments/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' addParams( options: [:] ) +include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' workflow test_dshbio_exportsegments { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/exportsegments/nextflow.config b/tests/modules/dshbio/exportsegments/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/dshbio/exportsegments/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/dshbio/exportsegments/test.yml b/tests/modules/dshbio/exportsegments/test.yml index 453e1cba..c811df03 100644 --- a/tests/modules/dshbio/exportsegments/test.yml +++ b/tests/modules/dshbio/exportsegments/test.yml @@ -1,5 +1,5 @@ - name: dshbio exportsegments - command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/exportsegments/nextflow.config tags: - dshbio - dshbio/exportsegments diff --git a/tests/modules/dshbio/filterbed/main.nf b/tests/modules/dshbio/filterbed/main.nf index 
722c88d2..454a03be 100644 --- a/tests/modules/dshbio/filterbed/main.nf +++ b/tests/modules/dshbio/filterbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' addParams( options: [suffix: '.filtered', args: '--range chr1:0-1000'] ) +include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' workflow test_dshbio_filterbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/filterbed/nextflow.config b/tests/modules/dshbio/filterbed/nextflow.config new file mode 100644 index 00000000..2f1e5ab9 --- /dev/null +++ b/tests/modules/dshbio/filterbed/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERBED { + ext.args = '--range chr1:0-1000' + ext.suffix = '.filtered' + } +} diff --git a/tests/modules/dshbio/filterbed/test.yml b/tests/modules/dshbio/filterbed/test.yml index ad1cde66..278fd5a3 100644 --- a/tests/modules/dshbio/filterbed/test.yml +++ b/tests/modules/dshbio/filterbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio filterbed - command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filterbed/nextflow.config tags: - dshbio - dshbio/filterbed diff --git a/tests/modules/dshbio/filtergff3/main.nf b/tests/modules/dshbio/filtergff3/main.nf index 3156d091..7c803781 100644 --- a/tests/modules/dshbio/filtergff3/main.nf +++ b/tests/modules/dshbio/filtergff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERGFF3 } from '../../../../modules/dshbio/filtergff3/main.nf' addParams( options: [suffix: '.filtered', args: '--range MT192765.1:0-1000'] ) +include { DSHBIO_FILTERGFF3 } from 
'../../../../modules/dshbio/filtergff3/main.nf' workflow test_dshbio_filtergff3 { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/filtergff3/nextflow.config b/tests/modules/dshbio/filtergff3/nextflow.config new file mode 100644 index 00000000..c4b75eaf --- /dev/null +++ b/tests/modules/dshbio/filtergff3/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERGFF3 { + ext.args = '--range MT192765.1:0-1000' + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/dshbio/filtergff3/test.yml b/tests/modules/dshbio/filtergff3/test.yml index 95d1b446..43238333 100644 --- a/tests/modules/dshbio/filtergff3/test.yml +++ b/tests/modules/dshbio/filtergff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio filtergff3 - command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filtergff3/nextflow.config tags: - dshbio - dshbio/filtergff3 diff --git a/tests/modules/dshbio/splitbed/main.nf b/tests/modules/dshbio/splitbed/main.nf index d7f3d004..517baad0 100644 --- a/tests/modules/dshbio/splitbed/main.nf +++ b/tests/modules/dshbio/splitbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' addParams( options: [suffix: '.', args: '--records 2'] ) +include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' workflow test_dshbio_splitbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitbed/nextflow.config b/tests/modules/dshbio/splitbed/nextflow.config new file mode 100644 index 00000000..4369c509 --- /dev/null +++ b/tests/modules/dshbio/splitbed/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_SPLITBED { + ext.suffix = '.' + ext.args = '--records 2' + } + +} diff --git a/tests/modules/dshbio/splitbed/test.yml b/tests/modules/dshbio/splitbed/test.yml index 04f5b150..ab14648e 100644 --- a/tests/modules/dshbio/splitbed/test.yml +++ b/tests/modules/dshbio/splitbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitbed - command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitbed/nextflow.config tags: - dshbio - dshbio/splitbed diff --git a/tests/modules/dshbio/splitgff3/main.nf b/tests/modules/dshbio/splitgff3/main.nf index dd58201a..03aa5394 100644 --- a/tests/modules/dshbio/splitgff3/main.nf +++ b/tests/modules/dshbio/splitgff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' addParams( options: [suffix: '.', args: '--records 15'] ) +include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' workflow test_dshbio_splitgff3 { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitgff3/nextflow.config b/tests/modules/dshbio/splitgff3/nextflow.config new file mode 100644 index 00000000..e31f8e13 --- /dev/null +++ b/tests/modules/dshbio/splitgff3/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_SPLITGFF3 { + ext.suffix = '.' 
+ ext.args = '--records 15' + } + +} diff --git a/tests/modules/dshbio/splitgff3/test.yml b/tests/modules/dshbio/splitgff3/test.yml index fe5b1bed..6087ce11 100644 --- a/tests/modules/dshbio/splitgff3/test.yml +++ b/tests/modules/dshbio/splitgff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitgff3 - command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitgff3/nextflow.config tags: - dshbio - dshbio/splitgff3 diff --git a/tests/modules/ectyper/main.nf b/tests/modules/ectyper/main.nf index 123df68d..dd359fa2 100644 --- a/tests/modules/ectyper/main.nf +++ b/tests/modules/ectyper/main.nf @@ -2,12 +2,14 @@ nextflow.enable.dsl = 2 -include { ECTYPER } from '../../../modules/ectyper/main.nf' addParams( options: [:] ) +include { ECTYPER } from '../../../modules/ectyper/main.nf' workflow test_ectyper { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] ECTYPER ( input ) } diff --git a/tests/modules/ectyper/nextflow.config b/tests/modules/ectyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ectyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ectyper/test.yml b/tests/modules/ectyper/test.yml index c6f4c668..4f909bd9 100644 --- a/tests/modules/ectyper/test.yml +++ b/tests/modules/ectyper/test.yml @@ -1,5 +1,5 @@ - name: ectyper test_ectyper - command: nextflow run tests/modules/ectyper -entry test_ectyper -c tests/config/nextflow.config + command: 
nextflow run ./tests/modules/ectyper -entry test_ectyper -c ./tests/config/nextflow.config -c ./tests/modules/ectyper/nextflow.config tags: - ectyper files: diff --git a/tests/modules/emmtyper/main.nf b/tests/modules/emmtyper/main.nf index 9f2181a8..ee96fc32 100644 --- a/tests/modules/emmtyper/main.nf +++ b/tests/modules/emmtyper/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { EMMTYPER } from '../../../modules/emmtyper/main.nf' addParams( options: [:] ) +include { EMMTYPER } from '../../../modules/emmtyper/main.nf' workflow test_emmtyper { diff --git a/tests/modules/emmtyper/nextflow.config b/tests/modules/emmtyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/emmtyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/emmtyper/test.yml b/tests/modules/emmtyper/test.yml index da59e0f1..81854eb6 100644 --- a/tests/modules/emmtyper/test.yml +++ b/tests/modules/emmtyper/test.yml @@ -1,5 +1,5 @@ - name: emmtyper test_emmtyper - command: nextflow run tests/modules/emmtyper -entry test_emmtyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/emmtyper -entry test_emmtyper -c ./tests/config/nextflow.config -c ./tests/modules/emmtyper/nextflow.config tags: - emmtyper files: diff --git a/tests/modules/ensemblvep/main.nf b/tests/modules/ensemblvep/main.nf index 3cbb26f1..223847c7 100644 --- a/tests/modules/ensemblvep/main.nf +++ b/tests/modules/ensemblvep/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' addParams( vep_tag: '104.3.WBcel235', use_cache: false ) +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' workflow test_ensemblvep { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] + input = [ + [ 
id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) } diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config new file mode 100644 index 00000000..bcca2d06 --- /dev/null +++ b/tests/modules/ensemblvep/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + ext.vep_tag = '104.3.WBcel235' + ext.use_cache = false + } + +} diff --git a/tests/modules/ensemblvep/test.yml b/tests/modules/ensemblvep/test.yml index a6e33cae..42384d6e 100644 --- a/tests/modules/ensemblvep/test.yml +++ b/tests/modules/ensemblvep/test.yml @@ -1,5 +1,5 @@ - name: ensemblvep test_ensemblvep - command: nextflow run tests/modules/ensemblvep -entry test_ensemblvep -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ensemblvep -entry test_ensemblvep -c ./tests/config/nextflow.config -c ./tests/modules/ensemblvep/nextflow.config tags: - ensemblvep files: diff --git a/tests/modules/expansionhunter/main.nf b/tests/modules/expansionhunter/main.nf index a7acbff4..91faeeb8 100644 --- a/tests/modules/expansionhunter/main.nf +++ b/tests/modules/expansionhunter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' addParams( options: [:] ) +include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' workflow test_expansionhunter { diff --git a/tests/modules/expansionhunter/nextflow.config b/tests/modules/expansionhunter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/expansionhunter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/expansionhunter/test.yml b/tests/modules/expansionhunter/test.yml index 78d5c002..19403588 100644 --- a/tests/modules/expansionhunter/test.yml +++ b/tests/modules/expansionhunter/test.yml @@ -1,5 +1,5 @@ - name: expansionhunter test_expansionhunter - command: nextflow run tests/modules/expansionhunter -entry test_expansionhunter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/expansionhunter -entry test_expansionhunter -c ./tests/config/nextflow.config -c ./tests/modules/expansionhunter/nextflow.config tags: - expansionhunter files: diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf index f89392ff..6600015b 100644 --- a/tests/modules/fargene/main.nf +++ b/tests/modules/fargene/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FARGENE } from '../../../modules/fargene/main.nf' addParams( options: [:] ) +include { FARGENE } from '../../../modules/fargene/main.nf' workflow test_fargene { diff --git a/tests/modules/fargene/nextflow.config b/tests/modules/fargene/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fargene/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml index 3db6699c..622e44b0 100644 --- a/tests/modules/fargene/test.yml +++ b/tests/modules/fargene/test.yml @@ -1,5 +1,5 @@ - name: fargene - command: nextflow run tests/modules/fargene -entry test_fargene -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fargene -entry test_fargene -c ./tests/config/nextflow.config -c ./tests/modules/fargene/nextflow.config tags: - fargene files: diff --git a/tests/modules/fastani/main.nf b/tests/modules/fastani/main.nf index a5548e20..0395f6a9 100644 --- a/tests/modules/fastani/main.nf +++ b/tests/modules/fastani/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 
2 -include { FASTANI } from '../../../modules/fastani/main.nf' addParams( options: [:] ) +include { FASTANI } from '../../../modules/fastani/main.nf' workflow test_fastani { diff --git a/tests/modules/fastani/nextflow.config b/tests/modules/fastani/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastani/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastani/test.yml b/tests/modules/fastani/test.yml index cd411d06..f3748d25 100644 --- a/tests/modules/fastani/test.yml +++ b/tests/modules/fastani/test.yml @@ -1,5 +1,5 @@ - name: fastani - command: nextflow run ./tests/modules/fastani -entry test_fastani -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastani -entry test_fastani -c ./tests/config/nextflow.config -c ./tests/modules/fastani/nextflow.config tags: - fastani files: diff --git a/tests/modules/fastp/main.nf b/tests/modules/fastp/main.nf index c8e5112f..d1540974 100644 --- a/tests/modules/fastp/main.nf +++ b/tests/modules/fastp/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTP } from '../../../modules/fastp/main.nf' addParams( options: [:] ) +include { FASTP } from '../../../modules/fastp/main.nf' // // Test with single-end data diff --git a/tests/modules/fastp/nextflow.config b/tests/modules/fastp/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastp/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastp/test.yml b/tests/modules/fastp/test.yml index 365ce025..cd7ddeed 100644 --- a/tests/modules/fastp/test.yml +++ b/tests/modules/fastp/test.yml @@ -1,5 +1,5 @@ - name: fastp test_fastp_single_end - command: nextflow run tests/modules/fastp -entry test_fastp_single_end 
-c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -17,7 +17,7 @@ md5sum: e0d856ebb3da9e4462c3ce9683efe01d - name: fastp test_fastp_paired_end - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -38,7 +38,7 @@ md5sum: 9eff7203596580cc5e42aceab4a469df - name: fastp test_fastp_single_end_trim_fail - command: nextflow run tests/modules/fastp -entry test_fastp_single_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -58,7 +58,7 @@ md5sum: de315d397c994d8e66bafc7a8dc11070 - name: fastp test_fastp_paired_end_trim_fail - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -83,7 +83,7 @@ md5sum: f52309b35a7c15cbd56a9c3906ef98a5 - name: fastp test_fastp_paired_end_merged - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_merged -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_merged -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: diff --git a/tests/modules/fastqc/main.nf b/tests/modules/fastqc/main.nf index d95befec..f7db9b7c 100644 --- a/tests/modules/fastqc/main.nf +++ b/tests/modules/fastqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' // // Test with single-end data diff --git a/tests/modules/fastqc/nextflow.config b/tests/modules/fastqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastqc/test.yml b/tests/modules/fastqc/test.yml index 794e63fe..fa830cbc 100644 --- a/tests/modules/fastqc/test.yml +++ b/tests/modules/fastqc/test.yml @@ -1,5 +1,5 @@ - name: fastqc single-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastqc/nextflow.config -c ./tests/modules/fastqc/nextflow.config tags: - fastqc files: @@ -7,7 +7,7 @@ - path: ./output/fastqc/test_fastqc.zip - name: fastqc paired-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/fastqc/nextflow.config -c ./tests/modules/fastqc/nextflow.config tags: - fastqc files: diff --git a/tests/modules/fastqscan/main.nf b/tests/modules/fastqscan/main.nf index 5fd824f6..b9a321fe 100644 --- a/tests/modules/fastqscan/main.nf +++ b/tests/modules/fastqscan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' addParams( options: [ args: "-g 30000"] ) +include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' workflow test_fastqscan { diff --git a/tests/modules/fastqscan/nextflow.config b/tests/modules/fastqscan/nextflow.config new file mode 100644 index 
00000000..f688ecb6 --- /dev/null +++ b/tests/modules/fastqscan/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FASTQSCAN { + ext.args = '-g 30000' + } + +} diff --git a/tests/modules/fastqscan/test.yml b/tests/modules/fastqscan/test.yml index 80bcbc47..d538804c 100644 --- a/tests/modules/fastqscan/test.yml +++ b/tests/modules/fastqscan/test.yml @@ -1,5 +1,5 @@ - name: fastqscan test_fastqscan - command: nextflow run tests/modules/fastqscan -entry test_fastqscan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqscan -entry test_fastqscan -c ./tests/config/nextflow.config -c ./tests/modules/fastqscan/nextflow.config tags: - fastqscan files: diff --git a/tests/modules/fasttree/main.nf b/tests/modules/fasttree/main.nf index 109aaa77..e33228a9 100644 --- a/tests/modules/fasttree/main.nf +++ b/tests/modules/fasttree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTTREE } from '../../../modules/fasttree/main.nf' addParams( options: [:] ) +include { FASTTREE } from '../../../modules/fasttree/main.nf' workflow test_fasttree { diff --git a/tests/modules/fasttree/nextflow.config b/tests/modules/fasttree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fasttree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fasttree/test.yml b/tests/modules/fasttree/test.yml index b30590c7..7e344cff 100644 --- a/tests/modules/fasttree/test.yml +++ b/tests/modules/fasttree/test.yml @@ -1,5 +1,5 @@ - name: fasttree - command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c ./tests/config/nextflow.config -c ./tests/modules/fasttree/nextflow.config tags: 
- fasttree files: diff --git a/tests/modules/fgbio/callmolecularconsensusreads/main.nf b/tests/modules/fgbio/callmolecularconsensusreads/main.nf index 8ce34eca..e31fdf39 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/tests/modules/fgbio/callmolecularconsensusreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [args: '-s TemplateCoordinate', suffix: '_out'] ) -include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' addParams( options: [args: '-M 1', suffix: '_molreads'] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' +include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' workflow test_fgbio_callmolecularconsensusreads { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config new file mode 100644 index 00000000..0a266da9 --- /dev/null +++ b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FGBIO_SORTBAM { + ext.args = '-s TemplateCoordinate' + ext.suffix = '_out' + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1' + ext.suffix = '_molreads' + } + +} diff --git a/tests/modules/fgbio/callmolecularconsensusreads/test.yml b/tests/modules/fgbio/callmolecularconsensusreads/test.yml index ac53957c..5e26cd01 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/test.yml +++ b/tests/modules/fgbio/callmolecularconsensusreads/test.yml @@ -1,5 +1,5 @@ - name: fgbio callmolecularconsensusreads - command: nextflow run tests/modules/fgbio/callmolecularconsensusreads -entry 
test_fgbio_callmolecularconsensusreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/callmolecularconsensusreads -entry test_fgbio_callmolecularconsensusreads -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/callmolecularconsensusreads/nextflow.config tags: - fgbio - fgbio/callmolecularconsensusreads diff --git a/tests/modules/fgbio/fastqtobam/main.nf b/tests/modules/fgbio/fastqtobam/main.nf index ce2f7efc..f01a17fa 100644 --- a/tests/modules/fgbio/fastqtobam/main.nf +++ b/tests/modules/fgbio/fastqtobam/main.nf @@ -1,16 +1,19 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -params.read_structure = "+T 12M11S+T" -include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' addParams( options: [:] ) +include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' workflow test_fgbio_fastqtobam { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + read_structure = "+T 12M11S+T" - FGBIO_FASTQTOBAM ( input, "${params.read_structure}" ) + FGBIO_FASTQTOBAM ( input, read_structure ) } diff --git a/tests/modules/fgbio/fastqtobam/nextflow.config b/tests/modules/fgbio/fastqtobam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/fastqtobam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/fastqtobam/test.yml 
b/tests/modules/fgbio/fastqtobam/test.yml index 6f2554e9..ab73f425 100644 --- a/tests/modules/fgbio/fastqtobam/test.yml +++ b/tests/modules/fgbio/fastqtobam/test.yml @@ -1,10 +1,8 @@ - name: fgbio fastqtobam test_fgbio_fastqtobam - command: nextflow run tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/fastqtobam/nextflow.config tags: - fgbio/fastqtobam - fgbio files: - path: output/fgbio/test_umi_converted.bam md5sum: 9510735554e5eff29244077a72075fb6 - - path: output/fgbio/versions.yml - md5sum: 524815093b96759060d0d800fc6a3f25 diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf index 31f55724..1d5fb474 100644 --- a/tests/modules/fgbio/groupreadsbyumi/main.nf +++ b/tests/modules/fgbio/groupreadsbyumi/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' addParams( options: [:] ) +include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' workflow test_fgbio_groupreadsbyumi { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ] - + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) + ] strategy = "Adjacency" FGBIO_GROUPREADSBYUMI ( input, strategy ) diff --git a/tests/modules/fgbio/groupreadsbyumi/nextflow.config b/tests/modules/fgbio/groupreadsbyumi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/groupreadsbyumi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/groupreadsbyumi/test.yml b/tests/modules/fgbio/groupreadsbyumi/test.yml index ce70f129..c1cfd4f0 100644 --- a/tests/modules/fgbio/groupreadsbyumi/test.yml +++ b/tests/modules/fgbio/groupreadsbyumi/test.yml @@ -1,5 +1,5 @@ - name: fgbio groupreadsbyumi test_fgbio_groupreadsbyumi - command: nextflow run tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/groupreadsbyumi/nextflow.config tags: - fgbio - fgbio/groupreadsbyumi diff --git a/tests/modules/fgbio/sortbam/main.nf b/tests/modules/fgbio/sortbam/main.nf index 65bea1d7..ada99d0f 100644 --- a/tests/modules/fgbio/sortbam/main.nf +++ b/tests/modules/fgbio/sortbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [:] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' workflow test_fgbio_sortbam { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/fgbio/sortbam/nextflow.config b/tests/modules/fgbio/sortbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/sortbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/sortbam/test.yml b/tests/modules/fgbio/sortbam/test.yml index 68183cd2..6789aed8 100644 --- a/tests/modules/fgbio/sortbam/test.yml +++ b/tests/modules/fgbio/sortbam/test.yml @@ -1,5 +1,5 @@ - name: fgbio sortbam - command: nextflow run tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/sortbam/nextflow.config tags: - fgbio - fgbio/sortbam diff --git a/tests/modules/filtlong/main.nf b/tests/modules/filtlong/main.nf index cd037623..df7892aa 100644 --- a/tests/modules/filtlong/main.nf +++ b/tests/modules/filtlong/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FILTLONG } from '../../../modules/filtlong/main.nf' addParams( options: [:] ) +include { FILTLONG } from '../../../modules/filtlong/main.nf' workflow test_filtlong { diff --git a/tests/modules/filtlong/nextflow.config b/tests/modules/filtlong/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/filtlong/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/filtlong/test.yml b/tests/modules/filtlong/test.yml index 30779d45..dc5fa5a9 100644 --- a/tests/modules/filtlong/test.yml +++ b/tests/modules/filtlong/test.yml @@ -1,5 +1,5 @@ - name: filtlong test_filtlong - command: nextflow run tests/modules/filtlong -entry test_filtlong -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -7,7 +7,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_se - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_se -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -15,7 +15,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_pe - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_pe -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_pe -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: diff --git a/tests/modules/flash/main.nf b/tests/modules/flash/main.nf index 2128650d..4afcb8fc 100644 --- a/tests/modules/flash/main.nf +++ b/tests/modules/flash/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FLASH } from '../../../modules/flash/main.nf' addParams( options: [args:'-m 20 -M 100'] ) +include { FLASH } from '../../../modules/flash/main.nf' workflow test_flash { input = [ diff --git a/tests/modules/flash/nextflow.config b/tests/modules/flash/nextflow.config new file mode 100644 index 00000000..2845f9d9 --- /dev/null +++ b/tests/modules/flash/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FLASH { + ext.args = '-m 20 -M 100' + } + +} diff --git a/tests/modules/flash/test.yml b/tests/modules/flash/test.yml index 31cdaeff..e5ed49ca 100644 --- a/tests/modules/flash/test.yml +++ b/tests/modules/flash/test.yml @@ -1,5 +1,5 @@ - name: flash test_flash - command: nextflow run tests/modules/flash -entry test_flash -c tests/config/nextflow.config + command: nextflow run ./tests/modules/flash -entry test_flash -c ./tests/config/nextflow.config -c ./tests/modules/flash/nextflow.config tags: - flash files: diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf index c6f5641f..f8ae0ecb 100644 --- a/tests/modules/freebayes/main.nf +++ b/tests/modules/freebayes/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) +include { FREEBAYES } from '../../../modules/freebayes/main.nf' workflow test_freebayes { diff --git a/tests/modules/freebayes/nextflow.config b/tests/modules/freebayes/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/freebayes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml index 22fd0e88..c9aa78da 100644 --- a/tests/modules/freebayes/test.yml +++ b/tests/modules/freebayes/test.yml @@ -1,33 +1,33 @@ - name: freebayes test_freebayes - command: nextflow run tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_bed - command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_bed -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_cram - command: nextflow run tests/modules/freebayes -entry test_freebayes_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_cram -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic - command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic_cram_intervals - command: nextflow run tests/modules/freebayes -entry 
test_freebayes_somatic_cram_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: diff --git a/tests/modules/gatk4/applybqsr/main.nf b/tests/modules/gatk4/applybqsr/main.nf index 80b51015..da85b11b 100644 --- a/tests/modules/gatk4/applybqsr/main.nf +++ b/tests/modules/gatk4/applybqsr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' addParams( options: [:] ) +include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' workflow test_gatk4_applybqsr { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/applybqsr/nextflow.config b/tests/modules/gatk4/applybqsr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/applybqsr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index 02448b02..d0b07d94 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -1,5 +1,5 @@ - name: gatk4 applybqsr test_gatk4_applybqsr - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 @@ -8,7 +8,7 @@ md5sum: af56f5dd81b95070079d54670507f530 - name: gatk4 applybqsr test_gatk4_applybqsr_intervals - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 @@ -17,7 +17,7 @@ md5sum: 0cbfa4be143e988d56ce741b5077510e - name: gatk4 applybqsr test_gatk4_applybqsr_cram - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 diff --git a/tests/modules/gatk4/baserecalibrator/main.nf b/tests/modules/gatk4/baserecalibrator/main.nf index a50c09e3..2675d04b 100644 --- a/tests/modules/gatk4/baserecalibrator/main.nf +++ b/tests/modules/gatk4/baserecalibrator/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' addParams( options: [:] ) +include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' workflow test_gatk4_baserecalibrator { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/baserecalibrator/nextflow.config b/tests/modules/gatk4/baserecalibrator/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/baserecalibrator/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/baserecalibrator/test.yml b/tests/modules/gatk4/baserecalibrator/test.yml index a15c9ee3..163fac08 100644 --- a/tests/modules/gatk4/baserecalibrator/test.yml +++ b/tests/modules/gatk4/baserecalibrator/test.yml @@ -1,5 +1,5 @@ - name: gatk4 baserecalibrator test_gatk4_baserecalibrator - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -8,7 +8,7 @@ md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_cram - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -17,7 +17,7 @@ md5sum: 35d89a3811aa31711fc9815b6b80e6ec - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_intervals - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -26,7 +26,7 @@ md5sum: 9ecb5f00a2229291705addc09c0ec231 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_multiple_sites - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator diff --git a/tests/modules/gatk4/bedtointervallist/main.nf b/tests/modules/gatk4/bedtointervallist/main.nf index 1ca4be58..2dd72904 100644 --- a/tests/modules/gatk4/bedtointervallist/main.nf +++ 
b/tests/modules/gatk4/bedtointervallist/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' addParams( options: [:] ) +include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' workflow test_gatk4_bedtointervallist { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/bedtointervallist/nextflow.config b/tests/modules/gatk4/bedtointervallist/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/bedtointervallist/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/bedtointervallist/test.yml b/tests/modules/gatk4/bedtointervallist/test.yml index 83c3a574..3482fa6c 100644 --- a/tests/modules/gatk4/bedtointervallist/test.yml +++ b/tests/modules/gatk4/bedtointervallist/test.yml @@ -1,5 +1,5 @@ - name: gatk4 bedtointervallist test_gatk4_bedtointervallist - command: nextflow run tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/bedtointervallist/nextflow.config tags: - gatk4 - gatk4/bedtointervallist diff --git a/tests/modules/gatk4/calculatecontamination/main.nf b/tests/modules/gatk4/calculatecontamination/main.nf index f93f66fb..4b659ed3 100644 --- a/tests/modules/gatk4/calculatecontamination/main.nf +++ b/tests/modules/gatk4/calculatecontamination/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' addParams( options: [:] ) +include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' workflow 
test_gatk4_calculatecontamination_tumor_only { diff --git a/tests/modules/gatk4/calculatecontamination/nextflow.config b/tests/modules/gatk4/calculatecontamination/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/calculatecontamination/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml index 89d419e0..0c489bff 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -1,5 +1,5 @@ - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_tumor_only - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 @@ -8,7 +8,7 @@ md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 @@ -17,7 +17,7 @@ md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation - command: nextflow run tests/modules/gatk4/calculatecontamination -entry 
test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 diff --git a/tests/modules/gatk4/createsequencedictionary/main.nf b/tests/modules/gatk4/createsequencedictionary/main.nf index 443d77bc..b304b043 100644 --- a/tests/modules/gatk4/createsequencedictionary/main.nf +++ b/tests/modules/gatk4/createsequencedictionary/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' addParams( options: [:] ) +include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' workflow test_gatk4_createsequencedictionary { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsequencedictionary/nextflow.config b/tests/modules/gatk4/createsequencedictionary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/createsequencedictionary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/createsequencedictionary/test.yml b/tests/modules/gatk4/createsequencedictionary/test.yml index 7788d16a..134a9d74 100644 --- a/tests/modules/gatk4/createsequencedictionary/test.yml +++ b/tests/modules/gatk4/createsequencedictionary/test.yml @@ -1,5 +1,5 @@ - name: gatk4 createsequencedictionary test_gatk4_createsequencedictionary - command: nextflow run tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsequencedictionary/nextflow.config tags: - gatk4 - gatk4/createsequencedictionary diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf index 6e5366f5..5e1d1904 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' addParams( options: [suffix:'.pon'] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' workflow test_gatk4_createsomaticpanelofnormals { db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config new file mode 100644 index 00000000..6fda39ec --- /dev/null +++ b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_CREATESOMATICPANELOFNORMALS { + ext.suffix = '.pon' + } + +} diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index d71059ad..a0e2bf26 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -1,5 +1,5 @@ - name: gatk4 createsomaticpanelofnormals test_gatk4_createsomaticpanelofnormals 
- command: nextflow run tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config tags: - gatk4 - gatk4/createsomaticpanelofnormals diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf index 72772318..398a6c79 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' addParams( options: [:] ) +include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' workflow test_gatk4_estimatelibrarycomplexity { diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml index ca949c00..a33e4ec1 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml +++ b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml @@ -1,5 +1,5 @@ - name: gatk4 estimatelibrarycomplexity test_gatk4_estimatelibrarycomplexity - command: nextflow run tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config tags: - gatk4/estimatelibrarycomplexity - gatk4 diff --git a/tests/modules/gatk4/fastqtosam/main.nf b/tests/modules/gatk4/fastqtosam/main.nf index 64694d9f..4f53c791 100644 --- a/tests/modules/gatk4/fastqtosam/main.nf +++ b/tests/modules/gatk4/fastqtosam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' addParams( options: [:] ) +include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' workflow test_gatk4_fastqtosam_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/gatk4/fastqtosam/nextflow.config b/tests/modules/gatk4/fastqtosam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/fastqtosam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index b576075a..d5d23f94 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -1,5 +1,5 @@ - name: gatk4 fastqtosam test_gatk4_fastqtosam_single_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4/fastqtosam - gatk4 @@ -8,7 +8,7 @@ md5sum: 0a0d308b219837977b8df9daa26db7de - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4 - gatk4/fastqtosam diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf index 5b2938e8..fa0acff9 100644 --- a/tests/modules/gatk4/filtermutectcalls/main.nf +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' addParams( options: [suffix:'.filtered'] ) +include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' workflow test_gatk4_filtermutectcalls_base { diff --git a/tests/modules/gatk4/filtermutectcalls/nextflow.config b/tests/modules/gatk4/filtermutectcalls/nextflow.config new file mode 100644 index 00000000..c830fdc6 --- /dev/null +++ b/tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_FILTERMUTECTCALLS { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index d5b97d36..72504e66 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -1,5 +1,5 @@ - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_base - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -11,7 +11,7 @@ md5sum: 
e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -23,7 +23,7 @@ md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index aff3973d..417a08a4 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' workflow test_gatk4_genomicsdbimport_create_genomicsdb { diff --git a/tests/modules/gatk4/genomicsdbimport/nextflow.config b/tests/modules/gatk4/genomicsdbimport/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/genomicsdbimport/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 5fe2b49b..94a1a35e 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -1,5 +1,5 @@ - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_create_genomicsdb - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 @@ -20,7 +20,7 @@ md5sum: 18d3f68bd2cb6f4474990507ff95017a - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_get_intervalslist - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_get_intervalslist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_get_intervalslist -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 @@ -29,7 +29,7 @@ md5sum: 4c85812ac15fc1cd29711a851d23c0bf - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_update_genomicsdb - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_update_genomicsdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_update_genomicsdb -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf index 
0b555180..208faf8b 100644 --- a/tests/modules/gatk4/genotypegvcfs/main.nf +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' addParams( options: [suffix:'.genotyped'] ) -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) +include { GATK4_GENOTYPEGVCFS } from '../../../../modules/gatk4/genotypegvcfs/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' // Basic parameters with uncompressed VCF input workflow test_gatk4_genotypegvcfs_vcf_input { diff --git a/tests/modules/gatk4/genotypegvcfs/nextflow.config b/tests/modules/gatk4/genotypegvcfs/nextflow.config new file mode 100644 index 00000000..aaa704da --- /dev/null +++ b/tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_GENOTYPEGVCFS { + ext.suffix = '.genotyped' + } + +} diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml index ad39a48d..45201af2 100644 --- a/tests/modules/gatk4/genotypegvcfs/test.yml +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -1,5 +1,5 @@ - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_vcf_input - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_vcf_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -8,7 +8,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input - command: nextflow run 
tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -17,7 +17,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -26,7 +26,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -35,7 +35,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c tests/config/nextflow.config 
+ command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -44,7 +44,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -53,7 +53,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -62,7 +62,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry 
test_gatk4_genotypegvcfs_gendb_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs @@ -71,7 +71,7 @@ contains: ['AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680'] - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals - command: nextflow run tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config tags: - gatk4 - gatk4/genotypegvcfs diff --git a/tests/modules/gatk4/getpileupsummaries/main.nf b/tests/modules/gatk4/getpileupsummaries/main.nf index 66ee4990..52f3bdec 100644 --- a/tests/modules/gatk4/getpileupsummaries/main.nf +++ b/tests/modules/gatk4/getpileupsummaries/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_GETPILEUPSUMMARIES } from '../../../../modules/gatk4/getpileupsummaries/main.nf' addParams( options: [:] ) +include { GATK4_GETPILEUPSUMMARIES } from '../../../../modules/gatk4/getpileupsummaries/main.nf' workflow test_gatk4_getpileupsummaries_just_variants { diff --git a/tests/modules/gatk4/getpileupsummaries/nextflow.config b/tests/modules/gatk4/getpileupsummaries/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/getpileupsummaries/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml index 6c5e1f84..3211a8fe 100644 --- a/tests/modules/gatk4/getpileupsummaries/test.yml +++ 
b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -1,5 +1,5 @@ - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_just_variants - command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_just_variants -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_just_variants -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/getpileupsummaries/nextflow.config tags: - gatk4 - gatk4/getpileupsummaries @@ -8,7 +8,7 @@ md5sum: 0d19674bef2ff0700d5b02b3463dd210 - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites - command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/getpileupsummaries/nextflow.config tags: - gatk4 - gatk4/getpileupsummaries diff --git a/tests/modules/gatk4/haplotypecaller/main.nf b/tests/modules/gatk4/haplotypecaller/main.nf index fd5f30fa..dc6bec67 100644 --- a/tests/modules/gatk4/haplotypecaller/main.nf +++ b/tests/modules/gatk4/haplotypecaller/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_HAPLOTYPECALLER } from '../../../../modules/gatk4/haplotypecaller/main.nf' addParams( options: [:] ) +include { GATK4_HAPLOTYPECALLER } from '../../../../modules/gatk4/haplotypecaller/main.nf' workflow test_gatk4_haplotypecaller { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/haplotypecaller/nextflow.config b/tests/modules/gatk4/haplotypecaller/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/haplotypecaller/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/haplotypecaller/test.yml b/tests/modules/gatk4/haplotypecaller/test.yml index 480ff8f0..31dd23fd 100644 --- a/tests/modules/gatk4/haplotypecaller/test.yml +++ b/tests/modules/gatk4/haplotypecaller/test.yml @@ -1,5 +1,5 @@ - name: gatk4 haplotypecaller test_gatk4_haplotypecaller - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 @@ -8,7 +8,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_cram - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 @@ -17,7 +17,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_intervals_dbsnp - command: nextflow run tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config tags: - gatk4/haplotypecaller - gatk4 diff --git a/tests/modules/gatk4/indexfeaturefile/main.nf b/tests/modules/gatk4/indexfeaturefile/main.nf index e523606a..f61b57fc 100644 --- a/tests/modules/gatk4/indexfeaturefile/main.nf +++ b/tests/modules/gatk4/indexfeaturefile/main.nf @@ -2,7 +2,7 @@ 
nextflow.enable.dsl = 2 -include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' addParams( options: [:] ) +include { GATK4_INDEXFEATUREFILE } from '../../../../modules/gatk4/indexfeaturefile/main.nf' workflow test_gatk4_indexfeaturefile_bed { diff --git a/tests/modules/gatk4/indexfeaturefile/nextflow.config b/tests/modules/gatk4/indexfeaturefile/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/indexfeaturefile/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/indexfeaturefile/test.yml b/tests/modules/gatk4/indexfeaturefile/test.yml index 5883695a..c524e462 100644 --- a/tests/modules/gatk4/indexfeaturefile/test.yml +++ b/tests/modules/gatk4/indexfeaturefile/test.yml @@ -2,7 +2,7 @@ # a) the path to the file is embedded inside it, # b) the file is binary so we can't check for text inside it. 
- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -10,7 +10,7 @@ - path: output/gatk4/genome.bed.idx - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed_gz - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -22,7 +22,7 @@ # a) the path to the file is embedded inside it, # b) the file is binary so we can't check for text inside it. 
- name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile @@ -30,7 +30,7 @@ - path: output/gatk4/test.genome.vcf.idx - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf_gz - command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config tags: - gatk4 - gatk4/indexfeaturefile diff --git a/tests/modules/gatk4/intervallisttools/main.nf b/tests/modules/gatk4/intervallisttools/main.nf index 59be74e8..535923fb 100644 --- a/tests/modules/gatk4/intervallisttools/main.nf +++ b/tests/modules/gatk4/intervallisttools/main.nf @@ -2,15 +2,17 @@ nextflow.enable.dsl = 2 -test_options = ['args': '--SCATTER_COUNT 6 --SUBDIVISION_MODE BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW --UNIQUE true --SORT true'] -include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' addParams( options: [:] ) -include { GATK4_INTERVALLISTTOOLS as INTERVALLISTTOOLS } from '../../../../modules/gatk4/intervallisttools/main.nf' addParams( options: test_options ) +include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' +include { GATK4_INTERVALLISTTOOLS } from '../../../../modules/gatk4/intervallisttools/main.nf' workflow test_gatk4_intervallisttools { - input = [ [ id:'test' ], [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ]] + input = [ + [ id:'test' ], + 
file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) GATK4_BEDTOINTERVALLIST ( input, dict ) - INTERVALLISTTOOLS ( GATK4_BEDTOINTERVALLIST.out.interval_list ) + GATK4_INTERVALLISTTOOLS ( GATK4_BEDTOINTERVALLIST.out.interval_list ) } diff --git a/tests/modules/gatk4/intervallisttools/nextflow.config b/tests/modules/gatk4/intervallisttools/nextflow.config new file mode 100644 index 00000000..b751ad9b --- /dev/null +++ b/tests/modules/gatk4/intervallisttools/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_INTERVALLISTTOOLS { + ext.args = '--SCATTER_COUNT 6 --SUBDIVISION_MODE BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW --UNIQUE true --SORT true' + } + +} diff --git a/tests/modules/gatk4/intervallisttools/test.yml b/tests/modules/gatk4/intervallisttools/test.yml index da3e6172..c9cb23b8 100644 --- a/tests/modules/gatk4/intervallisttools/test.yml +++ b/tests/modules/gatk4/intervallisttools/test.yml @@ -1,16 +1,16 @@ - name: gatk4 intervallisttools test_gatk4_intervallisttools - command: nextflow run tests/modules/gatk4/intervallisttools -entry test_gatk4_intervallisttools -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/intervallisttools -entry test_gatk4_intervallisttools -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/intervallisttools/nextflow.config tags: - gatk4 - gatk4/intervallisttools files: - path: output/gatk4/test.interval_list md5sum: e51101c9357fb2d59fd30e370eefa39c - - path: output/intervallisttools/test_split/temp_0001_of_6/1scattered.interval_list + - path: output/gatk4/test_split/temp_0001_of_6/1scattered.interval_list md5sum: b8ba8a387200df76a0d1c577626dc265 - - path: output/intervallisttools/test_split/temp_0002_of_6/2scattered.interval_list + - path: 
output/gatk4/test_split/temp_0002_of_6/2scattered.interval_list md5sum: 0728d164666d9264ef442a493e008dee - - path: output/intervallisttools/test_split/temp_0003_of_6/3scattered.interval_list + - path: output/gatk4/test_split/temp_0003_of_6/3scattered.interval_list md5sum: 55da0f3c69504148f4e7002a0e072cfe - - path: output/intervallisttools/test_split/temp_0004_of_6/4scattered.interval_list + - path: output/gatk4/test_split/temp_0004_of_6/4scattered.interval_list md5sum: d29ca4447f32547f2936567fa902796a diff --git a/tests/modules/gatk4/learnreadorientationmodel/main.nf b/tests/modules/gatk4/learnreadorientationmodel/main.nf index 1a71873e..dc6e8b89 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/main.nf +++ b/tests/modules/gatk4/learnreadorientationmodel/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_LEARNREADORIENTATIONMODEL } from '../../../../modules/gatk4/learnreadorientationmodel/main.nf' addParams( options: [suffix:'.artifact-prior'] ) +include { GATK4_LEARNREADORIENTATIONMODEL } from '../../../../modules/gatk4/learnreadorientationmodel/main.nf' workflow test_gatk4_learnreadorientationmodel { diff --git a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config new file mode 100644 index 00000000..3a74623a --- /dev/null +++ b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_LEARNREADORIENTATIONMODEL { + ext.suffix = '.artifact-prior' + } + +} diff --git a/tests/modules/gatk4/learnreadorientationmodel/test.yml b/tests/modules/gatk4/learnreadorientationmodel/test.yml index 6e999fa6..b88df15f 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/test.yml +++ b/tests/modules/gatk4/learnreadorientationmodel/test.yml @@ -1,5 +1,5 @@ - name: gatk4 learnreadorientationmodel 
test_gatk4_learnreadorientationmodel - command: nextflow run tests/modules/gatk4/learnreadorientationmodel -entry test_gatk4_learnreadorientationmodel -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/learnreadorientationmodel -entry test_gatk4_learnreadorientationmodel -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/learnreadorientationmodel/nextflow.config tags: - gatk4 - gatk4/learnreadorientationmodel diff --git a/tests/modules/gatk4/markduplicates/main.nf b/tests/modules/gatk4/markduplicates/main.nf index f80c1bd5..90fd866f 100644 --- a/tests/modules/gatk4/markduplicates/main.nf +++ b/tests/modules/gatk4/markduplicates/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MARKDUPLICATES } from '../../../../modules/gatk4/markduplicates/main.nf' addParams( options: [:] ) +include { GATK4_MARKDUPLICATES } from '../../../../modules/gatk4/markduplicates/main.nf' workflow test_gatk4_markduplicates { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/gatk4/markduplicates/nextflow.config b/tests/modules/gatk4/markduplicates/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/markduplicates/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index f4345bc4..0d4c7393 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -1,5 +1,5 @@ - name: gatk4 markduplicates test_gatk4_markduplicates - command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates -c ./tests/config/nextflow.config -c 
./tests/modules/gatk4/markduplicates/nextflow.config tags: - gatk4/markduplicates - gatk4 @@ -11,7 +11,7 @@ - path: output/gatk4/test.metrics - name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams - command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicates/nextflow.config tags: - gatk4/markduplicates - gatk4 diff --git a/tests/modules/gatk4/mergebamalignment/main.nf b/tests/modules/gatk4/mergebamalignment/main.nf index 745113ae..59bd833b 100644 --- a/tests/modules/gatk4/mergebamalignment/main.nf +++ b/tests/modules/gatk4/mergebamalignment/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MERGEBAMALIGNMENT } from '../../../../modules/gatk4/mergebamalignment/main.nf' addParams( options: [:] ) +include { GATK4_MERGEBAMALIGNMENT } from '../../../../modules/gatk4/mergebamalignment/main.nf' workflow test_gatk4_mergebamalignment { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/mergebamalignment/nextflow.config b/tests/modules/gatk4/mergebamalignment/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/mergebamalignment/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index 4fb98e3d..5e1ab8d5 100644 --- a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mergebamalignment test_gatk4_mergebamalignment - command: nextflow run tests/modules/gatk4/mergebamalignment -entry test_gatk4_mergebamalignment -c tests/config/nextflow.config + 
command: nextflow run ./tests/modules/gatk4/mergebamalignment -entry test_gatk4_mergebamalignment -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergebamalignment/nextflow.config tags: - gatk4 - gatk4/mergebamalignment diff --git a/tests/modules/gatk4/mergevcfs/main.nf b/tests/modules/gatk4/mergevcfs/main.nf index 5da894ab..fa09d758 100644 --- a/tests/modules/gatk4/mergevcfs/main.nf +++ b/tests/modules/gatk4/mergevcfs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_MERGEVCFS } from '../../../../modules/gatk4/mergevcfs/main.nf' addParams( options: [:] ) +include { GATK4_MERGEVCFS } from '../../../../modules/gatk4/mergevcfs/main.nf' workflow test_gatk4_mergevcfs { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/mergevcfs/nextflow.config b/tests/modules/gatk4/mergevcfs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/mergevcfs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/mergevcfs/test.yml b/tests/modules/gatk4/mergevcfs/test.yml index 884738b0..3ff2bf93 100644 --- a/tests/modules/gatk4/mergevcfs/test.yml +++ b/tests/modules/gatk4/mergevcfs/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mergevcfs test_gatk4_mergevcfs - command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config tags: - gatk4/mergevcfs - gatk4 @@ -8,7 +8,7 @@ md5sum: 5b289bda88d3a3504f2e19ee8cff177c - name: gatk4 mergevcfs test_gatk4_mergevcfs_refdict - command: nextflow run tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mergevcfs -entry 
test_gatk4_mergevcfs_refdict -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config tags: - gatk4/mergevcfs - gatk4 diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index e163cf9c..f477a0d7 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [:] ) +include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' // used to run with the mitochondria mode setting as this increases sensitivity, allowing for some tumor_normal variants to be detected while the old test data is still in use, will be removed when new test data for sarek is available. -include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' addParams( options: [args: '--mitochondria-mode'] ) +include { GATK4_MUTECT2 as GATK4_TEMPFIX_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' workflow test_gatk4_mutect2_tumor_normal_pair { input = [ [ id:'test'], // meta map diff --git a/tests/modules/gatk4/mutect2/nextflow.config b/tests/modules/gatk4/mutect2/nextflow.config new file mode 100644 index 00000000..0966fc15 --- /dev/null +++ b/tests/modules/gatk4/mutect2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_TEMPFIX_MUTECT2 { + ext.args = '--mitochondria-mode' + } + +} diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 031ed072..c6801e04 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -1,5 +1,5 @@ - name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c tests/config/nextflow.config + command: nextflow 
run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -11,7 +11,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_tumor_single - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -22,7 +22,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_cram_input - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -33,7 +33,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_generate_pon - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -44,7 +44,7 @@ - path: output/gatk4/test.vcf.gz.tbi - name: gatk4 mutect2 test_gatk4_mutect2_mitochondria - command: nextflow run tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 diff --git a/tests/modules/gatk4/revertsam/main.nf 
b/tests/modules/gatk4/revertsam/main.nf index df127c9b..ab5dddee 100644 --- a/tests/modules/gatk4/revertsam/main.nf +++ b/tests/modules/gatk4/revertsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_REVERTSAM } from '../../../../modules/gatk4/revertsam/main.nf' addParams( options: [:] ) +include { GATK4_REVERTSAM } from '../../../../modules/gatk4/revertsam/main.nf' workflow test_gatk4_revertsam { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/revertsam/nextflow.config b/tests/modules/gatk4/revertsam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/revertsam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index c65d3666..4199b118 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -1,5 +1,5 @@ - name: gatk4 revertsam test_gatk4_revertsam - command: nextflow run tests/modules/gatk4/revertsam -entry test_gatk4_revertsam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/revertsam -entry test_gatk4_revertsam -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/revertsam/nextflow.config tags: - gatk4 - gatk4/revertsam diff --git a/tests/modules/gatk4/samtofastq/main.nf b/tests/modules/gatk4/samtofastq/main.nf index db63a2a5..26a8ce2d 100644 --- a/tests/modules/gatk4/samtofastq/main.nf +++ b/tests/modules/gatk4/samtofastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_SAMTOFASTQ } from '../../../../modules/gatk4/samtofastq/main.nf' addParams( options: [:] ) +include { GATK4_SAMTOFASTQ } from '../../../../modules/gatk4/samtofastq/main.nf' workflow test_gatk4_samtofastq_single_end { input = [ [ id:'test', single_end: true ], // meta map diff --git 
a/tests/modules/gatk4/samtofastq/nextflow.config b/tests/modules/gatk4/samtofastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/samtofastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/samtofastq/test.yml b/tests/modules/gatk4/samtofastq/test.yml index 3d877d2f..66d3ee4c 100644 --- a/tests/modules/gatk4/samtofastq/test.yml +++ b/tests/modules/gatk4/samtofastq/test.yml @@ -1,5 +1,5 @@ - name: gatk4 samtofastq test_gatk4_samtofastq_single_end - command: nextflow run tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config tags: - gatk4 - gatk4/samtofastq @@ -8,7 +8,7 @@ md5sum: 50ace41d4c24467f24f8b929540a7797 - name: gatk4 samtofastq test_gatk4_samtofastq_paired_end - command: nextflow run tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config tags: - gatk4 - gatk4/samtofastq diff --git a/tests/modules/gatk4/splitncigarreads/main.nf b/tests/modules/gatk4/splitncigarreads/main.nf index 0934593f..7e5b7c9a 100644 --- a/tests/modules/gatk4/splitncigarreads/main.nf +++ b/tests/modules/gatk4/splitncigarreads/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarreads/main.nf' addParams( options: [:] ) +include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarreads/main.nf' workflow test_gatk4_splitncigarreads { input = [ [ id:'test' ], 
// meta map diff --git a/tests/modules/gatk4/splitncigarreads/nextflow.config b/tests/modules/gatk4/splitncigarreads/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/splitncigarreads/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index 146cd329..1ba8c5cd 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -1,5 +1,5 @@ - name: gatk4 splitncigarreads test_gatk4_splitncigarreads - command: nextflow run tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/splitncigarreads/nextflow.config tags: - gatk4 - gatk4/splitncigarreads diff --git a/tests/modules/gatk4/variantfiltration/main.nf b/tests/modules/gatk4/variantfiltration/main.nf index 67c9daec..221c469a 100644 --- a/tests/modules/gatk4/variantfiltration/main.nf +++ b/tests/modules/gatk4/variantfiltration/main.nf @@ -2,35 +2,38 @@ nextflow.enable.dsl = 2 -test_options = ['args': '--filter-name "test_filter" --filter-expression "MQ0 > 0"', 'suffix': '.filtered'] -include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' addParams( options: test_options ) +include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' // Basic parameters with uncompressed VCF input workflow test_gatk4_variantfiltration_vcf_input { - input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], 
checkIfExists: true) ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) } // Basic parameters with compressed VCF input workflow test_gatk4_variantfiltration_gz_input { - input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_VARIANTFILTRATION ( input, fasta, fastaIndex, fastaDict ) + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) } diff --git a/tests/modules/gatk4/variantfiltration/nextflow.config b/tests/modules/gatk4/variantfiltration/nextflow.config new file mode 100644 index 00000000..ff2feb9c --- /dev/null +++ b/tests/modules/gatk4/variantfiltration/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_VARIANTFILTRATION { + ext.args = "--filter-name \'test_filter\' --filter-expression \'MQ0 > 0\'" + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index e3177cfc..b5da0e5c 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -1,5 +1,5 @@ - name: gatk4 variantfiltration test_gatk4_variantfiltration_vcf_input - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config tags: - gatk4/variantfiltration - gatk4 @@ -9,7 +9,7 @@ - path: output/gatk4/test.filtered.vcf.gz.tbi - name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c ./tests/config/nextflow.config -c 
./tests/modules/gatk4/variantfiltration/nextflow.config tags: - gatk4/variantfiltration - gatk4 diff --git a/tests/modules/genmap/index/main.nf b/tests/modules/genmap/index/main.nf index 358ebb35..06106640 100644 --- a/tests/modules/genmap/index/main.nf +++ b/tests/modules/genmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [publish_dir:'genmap'] ) +include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' workflow test_genmap_index { diff --git a/tests/modules/genmap/index/nextflow.config b/tests/modules/genmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/genmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/genmap/index/test.yml b/tests/modules/genmap/index/test.yml index c5078014..ce2098ce 100644 --- a/tests/modules/genmap/index/test.yml +++ b/tests/modules/genmap/index/test.yml @@ -1,5 +1,5 @@ - name: genmap index test_genmap_index - command: nextflow run tests/modules/genmap/index -entry test_genmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/index -entry test_genmap_index -c ./tests/config/nextflow.config -c ./tests/modules/genmap/index/nextflow.config tags: - genmap - genmap/index diff --git a/tests/modules/genmap/mappability/main.nf b/tests/modules/genmap/mappability/main.nf index 636ec0e4..eb6a34fa 100644 --- a/tests/modules/genmap/mappability/main.nf +++ b/tests/modules/genmap/mappability/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [:] ) -include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' addParams( options: [args : '-K 50 -E 2 -w -t -bg'] ) +include { GENMAP_INDEX } from 
'../../../../modules/genmap/index/main.nf' +include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' workflow test_genmap_map { diff --git a/tests/modules/genmap/mappability/nextflow.config b/tests/modules/genmap/mappability/nextflow.config new file mode 100644 index 00000000..6936b9ea --- /dev/null +++ b/tests/modules/genmap/mappability/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GENMAP_MAPPABILITY { + ext.args = '-K 50 -E 2 -w -t -bg' + } + +} diff --git a/tests/modules/genmap/mappability/test.yml b/tests/modules/genmap/mappability/test.yml index 29a12de1..94c1d501 100644 --- a/tests/modules/genmap/mappability/test.yml +++ b/tests/modules/genmap/mappability/test.yml @@ -1,5 +1,5 @@ - name: genmap mappability test_genmap_map - command: nextflow run tests/modules/genmap/mappability -entry test_genmap_map -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/mappability -entry test_genmap_map -c ./tests/config/nextflow.config -c ./tests/modules/genmap/mappability/nextflow.config tags: - genmap - genmap/mappability diff --git a/tests/modules/genrich/main.nf b/tests/modules/genrich/main.nf index aa1a2d49..34db589e 100644 --- a/tests/modules/genrich/main.nf +++ b/tests/modules/genrich/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { GENRICH } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.1"] ) -include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.9"] ) -include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-r -p 0.1"] ) -include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-j -p 0.1"] ) +include { GENRICH } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_CTRL } from 
'../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' workflow test_genrich { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/genrich/nextflow.config b/tests/modules/genrich/nextflow.config new file mode 100644 index 00000000..8f79d7be --- /dev/null +++ b/tests/modules/genrich/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GENRICH { + ext.args = '-p 0.1' + } + + withName: GENRICH_CTRL { + ext.args = '-p 0.9' + } + + withName: GENRICH_ALL { + ext.args = '-r -p 0.1' + } + + withName: GENRICH_ATACSEQ { + ext.args = '-j -p 0.1' + } + +} diff --git a/tests/modules/genrich/test.yml b/tests/modules/genrich/test.yml index 63bf2927..972335c4 100644 --- a/tests/modules/genrich/test.yml +++ b/tests/modules/genrich/test.yml @@ -1,5 +1,5 @@ - name: genrich test_genrich - command: nextflow run tests/modules/genrich -entry test_genrich -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -7,7 +7,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_ctrl - command: nextflow run tests/modules/genrich -entry test_genrich_ctrl -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -15,7 +15,7 @@ md5sum: 2fcc392360b317f5ebee88cdbc149e05 - name: genrich test_genrich_all_outputs - command: nextflow run tests/modules/genrich -entry test_genrich_all_outputs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_all_outputs -c 
./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -31,7 +31,7 @@ md5sum: b14feef34b6d2379a173a734ca963cde - name: genrich test_genrich_blacklist - command: nextflow run tests/modules/genrich -entry test_genrich_blacklist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_blacklist -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -39,7 +39,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_atacseq - command: nextflow run tests/modules/genrich -entry test_genrich_atacseq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_atacseq -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: diff --git a/tests/modules/gffread/main.nf b/tests/modules/gffread/main.nf index 87e95275..6ab7922b 100644 --- a/tests/modules/gffread/main.nf +++ b/tests/modules/gffread/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GFFREAD } from '../../../modules/gffread/main.nf' addParams( options: [suffix: '.out'] ) +include { GFFREAD } from '../../../modules/gffread/main.nf' workflow test_gffread { input = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config new file mode 100644 index 00000000..00c052f5 --- /dev/null +++ b/tests/modules/gffread/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.suffix = '.out' + } + +} diff --git a/tests/modules/gffread/test.yml b/tests/modules/gffread/test.yml index 48096f1e..c5a16132 100644 --- a/tests/modules/gffread/test.yml +++ b/tests/modules/gffread/test.yml @@ -1,5 +1,5 @@ - name: gffread - command: nextflow run ./tests/modules/gffread/ 
-entry test_gffread -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gffread/ -entry test_gffread -c ./tests/config/nextflow.config -c ./tests/modules/gffread/nextflow.config tags: - gffread files: diff --git a/tests/modules/glnexus/main.nf b/tests/modules/glnexus/main.nf index 2a79b2fa..aeb7c7e2 100644 --- a/tests/modules/glnexus/main.nf +++ b/tests/modules/glnexus/main.nf @@ -2,12 +2,16 @@ nextflow.enable.dsl = 2 -include { GLNEXUS } from '../../../modules/glnexus/main.nf' addParams( options: [:] ) +include { GLNEXUS } from '../../../modules/glnexus/main.nf' workflow test_glnexus { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) + ] + ] + GLNEXUS ( input ) } diff --git a/tests/modules/glnexus/nextflow.config b/tests/modules/glnexus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/glnexus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/glnexus/test.yml b/tests/modules/glnexus/test.yml index c7b255ee..bfca4529 100644 --- a/tests/modules/glnexus/test.yml +++ b/tests/modules/glnexus/test.yml @@ -1,7 +1,7 @@ - name: glnexus test_glnexus - command: nextflow run tests/modules/glnexus -entry test_glnexus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/glnexus -entry test_glnexus -c ./tests/config/nextflow.config -c ./tests/modules/glnexus/nextflow.config tags: - glnexus files: - path: output/glnexus/test.bcf - md5sum: 
33ac8c9f3ff54e6a23177ba94a449173 + md5sum: 62b2cea9c1b92ac63645cb031eea46fc diff --git a/tests/modules/graphmap2/align/main.nf b/tests/modules/graphmap2/align/main.nf index 0cd885ab..96b95166 100644 --- a/tests/modules/graphmap2/align/main.nf +++ b/tests/modules/graphmap2/align/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' addParams( options: [:] ) -include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' +include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' workflow test_graphmap2_align { diff --git a/tests/modules/graphmap2/align/nextflow.config b/tests/modules/graphmap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/graphmap2/align/test.yml b/tests/modules/graphmap2/align/test.yml index 7e90b8d4..90e52dd1 100644 --- a/tests/modules/graphmap2/align/test.yml +++ b/tests/modules/graphmap2/align/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 align - command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/align/nextflow.config tags: - graphmap2 - graphmap2/align diff --git a/tests/modules/graphmap2/index/main.nf b/tests/modules/graphmap2/index/main.nf index 66347f06..3c449c6b 100644 --- a/tests/modules/graphmap2/index/main.nf +++ b/tests/modules/graphmap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' 
addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' workflow test_graphmap2_index { diff --git a/tests/modules/graphmap2/index/nextflow.config b/tests/modules/graphmap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/graphmap2/index/test.yml b/tests/modules/graphmap2/index/test.yml index 15042e97..0bff487e 100644 --- a/tests/modules/graphmap2/index/test.yml +++ b/tests/modules/graphmap2/index/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 index - command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/index/nextflow.config tags: - graphmap2 - graphmap2/index diff --git a/tests/modules/gstama/collapse/main.nf b/tests/modules/gstama/collapse/main.nf index 70b3c741..3eb97767 100644 --- a/tests/modules/gstama/collapse/main.nf +++ b/tests/modules/gstama/collapse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' addParams( options: [ args:"-x capped -b BAM", suffix:'_tc' ] ) +include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' workflow test_gstama_collapse { diff --git a/tests/modules/gstama/collapse/nextflow.config b/tests/modules/gstama/collapse/nextflow.config new file mode 100644 index 00000000..0455c8b2 --- /dev/null +++ b/tests/modules/gstama/collapse/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_COLLAPSE { + ext.args = '-x capped -b BAM' + 
ext.suffix = '_tc' + } + +} diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml index 3815a156..50d3775e 100644 --- a/tests/modules/gstama/collapse/test.yml +++ b/tests/modules/gstama/collapse/test.yml @@ -1,5 +1,5 @@ - name: gstama collapse test_gstama_collapse - command: nextflow run tests/modules/gstama/collapse -entry test_gstama_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/collapse -entry test_gstama_collapse -c ./tests/config/nextflow.config -c ./tests/modules/gstama/collapse/nextflow.config tags: - gstama - gstama/collapse diff --git a/tests/modules/gstama/merge/main.nf b/tests/modules/gstama/merge/main.nf index f9a8e05f..4a9102a2 100644 --- a/tests/modules/gstama/merge/main.nf +++ b/tests/modules/gstama/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' addParams( options: [suffix:'_merged'] ) +include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' workflow test_gstama_merge { diff --git a/tests/modules/gstama/merge/nextflow.config b/tests/modules/gstama/merge/nextflow.config new file mode 100644 index 00000000..a9c63fcf --- /dev/null +++ b/tests/modules/gstama/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_MERGE { + ext.suffix = '_merged' + } + +} diff --git a/tests/modules/gstama/merge/test.yml b/tests/modules/gstama/merge/test.yml index b98e35b6..1db35d15 100644 --- a/tests/modules/gstama/merge/test.yml +++ b/tests/modules/gstama/merge/test.yml @@ -1,5 +1,5 @@ - name: gstama merge test_gstama_merge - command: nextflow run tests/modules/gstama/merge -entry test_gstama_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/merge -entry test_gstama_merge -c ./tests/config/nextflow.config -c 
./tests/modules/gstama/merge/nextflow.config tags: - gstama - gstama/merge diff --git a/tests/modules/gtdbtk/classifywf/main.nf b/tests/modules/gtdbtk/classifywf/main.nf index f52b0ccc..1517d7cc 100644 --- a/tests/modules/gtdbtk/classifywf/main.nf +++ b/tests/modules/gtdbtk/classifywf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' addParams( options: [:] ) +include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' process STUB_GTDBTK_DATABASE { output: diff --git a/tests/modules/gtdbtk/classifywf/nextflow.config b/tests/modules/gtdbtk/classifywf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gtdbtk/classifywf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gtdbtk/classifywf/test.yml b/tests/modules/gtdbtk/classifywf/test.yml index 6d0f055e..e24f1e17 100644 --- a/tests/modules/gtdbtk/classifywf/test.yml +++ b/tests/modules/gtdbtk/classifywf/test.yml @@ -1,5 +1,5 @@ - name: gtdbtk classifywf - command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c ./tests/config/nextflow.config -stub-run -c ./tests/modules/gtdbtk/classifywf/nextflow.config tags: - gtdbtk - gtdbtk/classifywf diff --git a/tests/modules/gubbins/main.nf b/tests/modules/gubbins/main.nf index 87e164d0..342150b3 100644 --- a/tests/modules/gubbins/main.nf +++ b/tests/modules/gubbins/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUBBINS } from '../../../modules/gubbins/main.nf' addParams( options: [:] ) +include { GUBBINS } from '../../../modules/gubbins/main.nf' workflow test_gubbins { input = 
file(params.test_data['sarscov2']['genome']['all_sites_fas'], checkIfExists: true) diff --git a/tests/modules/gubbins/nextflow.config b/tests/modules/gubbins/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gubbins/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gubbins/test.yml b/tests/modules/gubbins/test.yml index 7bc0216b..6c85260d 100644 --- a/tests/modules/gubbins/test.yml +++ b/tests/modules/gubbins/test.yml @@ -1,5 +1,5 @@ - name: gubbins - command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c ./tests/config/nextflow.config -c ./tests/modules/gubbins/nextflow.config tags: - gubbins files: diff --git a/tests/modules/gunc/downloaddb/main.nf b/tests/modules/gunc/downloaddb/main.nf index c0321279..3e3126f5 100644 --- a/tests/modules/gunc/downloaddb/main.nf +++ b/tests/modules/gunc/downloaddb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_downloaddb { diff --git a/tests/modules/gunc/downloaddb/nextflow.config b/tests/modules/gunc/downloaddb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/downloaddb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/downloaddb/test.yml b/tests/modules/gunc/downloaddb/test.yml index d1aafae7..4e1c23f8 100644 --- a/tests/modules/gunc/downloaddb/test.yml +++ b/tests/modules/gunc/downloaddb/test.yml @@ -1,5 +1,5 @@ - name: gunc downloaddb - command: nextflow 
run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c ./tests/config/nextflow.config -c ./tests/modules/gunc/downloaddb/nextflow.config tags: - gunc - gunc/downloaddb diff --git a/tests/modules/gunc/run/main.nf b/tests/modules/gunc/run/main.nf index a1a191dc..28ecd35f 100644 --- a/tests/modules/gunc/run/main.nf +++ b/tests/modules/gunc/run/main.nf @@ -2,16 +2,16 @@ nextflow.enable.dsl = 2 -include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' addParams( options: [:] ) -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) - +include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_run { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] - - GUNC_DOWNLOADDB('progenomes') + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) + ] + GUNC_DOWNLOADDB ( 'progenomes' ) GUNC_RUN ( input, GUNC_DOWNLOADDB.out.db ) } diff --git a/tests/modules/gunc/run/nextflow.config b/tests/modules/gunc/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/run/test.yml b/tests/modules/gunc/run/test.yml index d527f37e..5bcef868 100644 --- a/tests/modules/gunc/run/test.yml +++ b/tests/modules/gunc/run/test.yml @@ -1,5 +1,5 @@ - name: gunc run - command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/gunc/run -entry test_gunc_run -c ./tests/config/nextflow.config -c ./tests/modules/gunc/run/nextflow.config tags: - gunc - gunc/run diff --git a/tests/modules/gunzip/main.nf b/tests/modules/gunzip/main.nf index 0c23a8cd..3d41a4a2 100644 --- a/tests/modules/gunzip/main.nf +++ b/tests/modules/gunzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUNZIP } from '../../../modules/gunzip/main.nf' addParams( options: [:] ) +include { GUNZIP } from '../../../modules/gunzip/main.nf' workflow test_gunzip { input = [ [], diff --git a/tests/modules/gunzip/nextflow.config b/tests/modules/gunzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunzip/test.yml b/tests/modules/gunzip/test.yml index 70012b21..70e95d6b 100644 --- a/tests/modules/gunzip/test.yml +++ b/tests/modules/gunzip/test.yml @@ -1,5 +1,5 @@ - name: gunzip - command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c ./tests/config/nextflow.config -c ./tests/modules/gunzip/nextflow.config tags: - gunzip files: diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf index 3ac9c20b..82c515de 100644 --- a/tests/modules/hicap/main.nf +++ b/tests/modules/hicap/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] ) +include { HICAP } from '../../../modules/hicap/main.nf' workflow test_hicap { - - input = [ [ id:'test', single_end:false ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species/haemophilus_influenzae/genome/GCF_900478275.fna.gz", checkIfExists: true) ] - + + input = [ + [ id:'test', single_end:false ], // meta map + 
file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true) + ] database_dir = [] model_fp = [] diff --git a/tests/modules/hicap/nextflow.config b/tests/modules/hicap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hicap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hicap/test.yml b/tests/modules/hicap/test.yml index 8c8420fd..0cce28c7 100644 --- a/tests/modules/hicap/test.yml +++ b/tests/modules/hicap/test.yml @@ -1,10 +1,10 @@ - name: hicap test_hicap - command: nextflow run tests/modules/hicap -entry test_hicap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hicap -entry test_hicap -c ./tests/config/nextflow.config -c ./tests/modules/hicap/nextflow.config tags: - hicap files: - - path: output/hicap/GCF_900478275.gbk + - path: output/hicap/genome.gbk md5sum: 562d026956903354ac80721f501335d4 - - path: output/hicap/GCF_900478275.svg + - path: output/hicap/genome.svg md5sum: 4fb94871dd0fdd8b4496049668176631 - - path: output/hicap/GCF_900478275.tsv + - path: output/hicap/genome.tsv diff --git a/tests/modules/hifiasm/main.nf b/tests/modules/hifiasm/main.nf index 30614389..f0e2a0f4 100644 --- a/tests/modules/hifiasm/main.nf +++ b/tests/modules/hifiasm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HIFIASM } from '../../../modules/hifiasm/main.nf' addParams( options: [args:'-f0'] ) +include { HIFIASM } from '../../../modules/hifiasm/main.nf' /* * Test with long reads only diff --git a/tests/modules/hifiasm/nextflow.config b/tests/modules/hifiasm/nextflow.config new file mode 100644 index 00000000..0994c901 --- /dev/null +++ b/tests/modules/hifiasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HIFIASM { + 
ext.args = '-f0' + } + +} diff --git a/tests/modules/hifiasm/test.yml b/tests/modules/hifiasm/test.yml index 47d9e38f..f7e3e6ae 100644 --- a/tests/modules/hifiasm/test.yml +++ b/tests/modules/hifiasm/test.yml @@ -1,5 +1,5 @@ - name: hifiasm test_hifiasm_hifi_only - command: nextflow run tests/modules/hifiasm -entry test_hifiasm_hifi_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_hifi_only -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: @@ -16,7 +16,7 @@ - path: output/hifiasm/test.asm.ovlp.source.bin - name: hifiasm test_hifiasm_with_parental_reads - command: nextflow run tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: diff --git a/tests/modules/hisat2/align/main.nf b/tests/modules/hisat2/align/main.nf index 7bbe3a4b..17b47c93 100644 --- a/tests/modules/hisat2/align/main.nf +++ b/tests/modules/hisat2/align/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] ) -include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' +include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' +include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' workflow test_hisat2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ 
id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -19,10 +22,13 @@ workflow test_hisat2_align_single_end { } workflow test_hisat2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/hisat2/align/nextflow.config b/tests/modules/hisat2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/align/test.yml b/tests/modules/hisat2/align/test.yml index 1c6c8ac2..54e263bc 100644 --- a/tests/modules/hisat2/align/test.yml +++ b/tests/modules/hisat2/align/test.yml @@ -1,5 +1,5 @@ - name: hisat2 align test_hisat2_align_single_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config 
tags: - hisat2 - hisat2/align @@ -9,25 +9,25 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - name: hisat2 align test_hisat2_align_paired_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config tags: - hisat2 - hisat2/align @@ -37,19 +37,19 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + 
- path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 diff --git a/tests/modules/hisat2/build_test/main.nf b/tests/modules/hisat2/build_test/main.nf index f40f47cc..a0c14dc8 100644 --- a/tests/modules/hisat2/build_test/main.nf +++ b/tests/modules/hisat2/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' +include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' workflow test_hisat2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/hisat2/build_test/nextflow.config b/tests/modules/hisat2/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/build_test/test.yml b/tests/modules/hisat2/build_test/test.yml index a8bb2390..da5a450c 100644 --- 
a/tests/modules/hisat2/build_test/test.yml +++ b/tests/modules/hisat2/build_test/test.yml @@ -1,24 +1,24 @@ - name: hisat2 build test_hisat2_build - command: nextflow run tests/modules/hisat2/build_test -entry test_hisat2_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/build_test -entry test_hisat2_build -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/build/nextflow.config tags: - hisat2 - hisat2/build files: - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 diff --git a/tests/modules/hisat2/extractsplicesites/main.nf b/tests/modules/hisat2/extractsplicesites/main.nf index 5c7e17b9..e947717e 100644 --- a/tests/modules/hisat2/extractsplicesites/main.nf +++ b/tests/modules/hisat2/extractsplicesites/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) +include 
{ HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' workflow test_hisat2_extractsplicesites { gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/hisat2/extractsplicesites/nextflow.config b/tests/modules/hisat2/extractsplicesites/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/extractsplicesites/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/extractsplicesites/test.yml b/tests/modules/hisat2/extractsplicesites/test.yml index a3e29346..a528199c 100644 --- a/tests/modules/hisat2/extractsplicesites/test.yml +++ b/tests/modules/hisat2/extractsplicesites/test.yml @@ -1,5 +1,5 @@ - name: hisat2 extractsplicesites test_hisat2_extractsplicesites - command: nextflow run tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/extractsplicesites/nextflow.config tags: - hisat2 - hisat2/extractsplicesites diff --git a/tests/modules/hmmcopy/gccounter/main.nf b/tests/modules/hmmcopy/gccounter/main.nf index 30846ca9..05728bf5 100644 --- a/tests/modules/hmmcopy/gccounter/main.nf +++ b/tests/modules/hmmcopy/gccounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' workflow test_hmmcopy_gccounter { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/hmmcopy/gccounter/nextflow.config 
b/tests/modules/hmmcopy/gccounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/gccounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/gccounter/test.yml b/tests/modules/hmmcopy/gccounter/test.yml index edcd6b92..1cd20273 100644 --- a/tests/modules/hmmcopy/gccounter/test.yml +++ b/tests/modules/hmmcopy/gccounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy gccounter test_hmmcopy_gccounter - command: nextflow run tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/gccounter/nextflow.config tags: - hmmcopy - hmmcopy/gccounter diff --git a/tests/modules/hmmcopy/readcounter/main.nf b/tests/modules/hmmcopy/readcounter/main.nf index 9025f98e..21737aab 100644 --- a/tests/modules/hmmcopy/readcounter/main.nf +++ b/tests/modules/hmmcopy/readcounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' workflow test_hmmcopy_readcounter { diff --git a/tests/modules/hmmcopy/readcounter/nextflow.config b/tests/modules/hmmcopy/readcounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/readcounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/readcounter/test.yml b/tests/modules/hmmcopy/readcounter/test.yml index 6c00ee08..a7e84f35 100644 --- a/tests/modules/hmmcopy/readcounter/test.yml +++ 
b/tests/modules/hmmcopy/readcounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy readcounter test_hmmcopy_readcounter - command: nextflow run tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/readcounter/nextflow.config tags: - hmmcopy - hmmcopy/readcounter diff --git a/tests/modules/hmmer/hmmalign/main.nf b/tests/modules/hmmer/hmmalign/main.nf index 55194dc6..3bf6d452 100644 --- a/tests/modules/hmmer/hmmalign/main.nf +++ b/tests/modules/hmmer/hmmalign/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' addParams( options: [:] ) +include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' workflow test_hmmer_hmmalign { diff --git a/tests/modules/hmmer/hmmalign/nextflow.config b/tests/modules/hmmer/hmmalign/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmer/hmmalign/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmer/hmmalign/test.yml b/tests/modules/hmmer/hmmalign/test.yml index 4afb34ca..2e5ccfaf 100644 --- a/tests/modules/hmmer/hmmalign/test.yml +++ b/tests/modules/hmmer/hmmalign/test.yml @@ -1,5 +1,5 @@ - name: hmmer hmmalign test_hmmer_hmmalign - command: nextflow run tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c ./tests/config/nextflow.config -c ./tests/modules/hmmer/hmmalign/nextflow.config tags: - hmmer - hmmer/hmmalign diff --git a/tests/modules/homer/annotatepeaks/main.nf b/tests/modules/homer/annotatepeaks/main.nf index b146c857..ab8f6f8f 100644 --- 
a/tests/modules/homer/annotatepeaks/main.nf +++ b/tests/modules/homer/annotatepeaks/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' addParams( options: [:] ) +include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' workflow test_homer_annotatepeaks { input = [ [ id:'test'], diff --git a/tests/modules/homer/annotatepeaks/nextflow.config b/tests/modules/homer/annotatepeaks/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/homer/annotatepeaks/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/homer/annotatepeaks/test.yml b/tests/modules/homer/annotatepeaks/test.yml index fed0f82e..52fd99a3 100644 --- a/tests/modules/homer/annotatepeaks/test.yml +++ b/tests/modules/homer/annotatepeaks/test.yml @@ -1,5 +1,5 @@ - name: homer annotatepeaks test_homer_annotatepeaks - command: nextflow run tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/annotatepeaks/nextflow.config tags: - homer - homer/annotatepeaks diff --git a/tests/modules/homer/findpeaks/main.nf b/tests/modules/homer/findpeaks/main.nf index 06d44bdf..0e7e8ed6 100644 --- a/tests/modules/homer/findpeaks/main.nf +++ b/tests/modules/homer/findpeaks/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' addParams( options: [args: '-style factor'] ) +include { HOMER_MAKETAGDIRECTORY } from 
'../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' workflow test_homer_findpeaks { input = [[id:'test'], diff --git a/tests/modules/homer/findpeaks/nextflow.config b/tests/modules/homer/findpeaks/nextflow.config new file mode 100644 index 00000000..9a921a3c --- /dev/null +++ b/tests/modules/homer/findpeaks/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + + withName: HOMER_FINDPEAKS { + ext.args = '-style factor' + } + +} diff --git a/tests/modules/homer/findpeaks/test.yml b/tests/modules/homer/findpeaks/test.yml index b0b1a0df..75e94529 100644 --- a/tests/modules/homer/findpeaks/test.yml +++ b/tests/modules/homer/findpeaks/test.yml @@ -1,5 +1,5 @@ - name: homer findpeaks - command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/findpeaks/nextflow.config tags: - homer - homer/findpeaks diff --git a/tests/modules/homer/maketagdirectory/main.nf b/tests/modules/homer/maketagdirectory/main.nf index 897aac1f..766aff0d 100644 --- a/tests/modules/homer/maketagdirectory/main.nf +++ b/tests/modules/homer/maketagdirectory/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' workflow test_homer_maketagdirectory { input = [[id:'test'], diff --git a/tests/modules/homer/maketagdirectory/nextflow.config b/tests/modules/homer/maketagdirectory/nextflow.config new file mode 100644 index 00000000..81587d69 --- 
/dev/null +++ b/tests/modules/homer/maketagdirectory/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml index 80112c0b..746c6ef6 100644 --- a/tests/modules/homer/maketagdirectory/test.yml +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -1,5 +1,5 @@ - name: homer maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory @@ -16,7 +16,7 @@ md5sum: e5aa2b9843ca9c04ace297280aed6af4 - name: homer meta maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory diff --git a/tests/modules/homer/makeucscfile/main.nf b/tests/modules/homer/makeucscfile/main.nf index 5ed75959..986c9c14 100644 --- a/tests/modules/homer/makeucscfile/main.nf +++ b/tests/modules/homer/makeucscfile/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' addParams( options: [:] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' 
+include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' workflow test_homer_makeucscfile { input = [[id:'test'], diff --git a/tests/modules/homer/makeucscfile/nextflow.config b/tests/modules/homer/makeucscfile/nextflow.config new file mode 100644 index 00000000..81587d69 --- /dev/null +++ b/tests/modules/homer/makeucscfile/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/makeucscfile/test.yml b/tests/modules/homer/makeucscfile/test.yml index 4d337f41..cf3d1b4d 100644 --- a/tests/modules/homer/makeucscfile/test.yml +++ b/tests/modules/homer/makeucscfile/test.yml @@ -1,5 +1,5 @@ - name: homer makeucscfile - command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c ./tests/config/nextflow.config -c ./tests/modules/homer/makeucscfile/nextflow.config tags: - homer - homer/makeucscfile diff --git a/tests/modules/idr/main.nf b/tests/modules/idr/main.nf index aa141a57..ed3bf289 100644 --- a/tests/modules/idr/main.nf +++ b/tests/modules/idr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IDR } from '../../../modules/idr/main.nf' addParams( options: [:] ) +include { IDR } from '../../../modules/idr/main.nf' workflow test_idr_narrowpeak { diff --git a/tests/modules/idr/nextflow.config b/tests/modules/idr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/idr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/idr/test.yml b/tests/modules/idr/test.yml index 35ee4bc9..9d5ef2a7 100644 --- 
a/tests/modules/idr/test.yml +++ b/tests/modules/idr/test.yml @@ -1,5 +1,5 @@ - name: idr test_idr_narrowpeak - command: nextflow run tests/modules/idr -entry test_idr_narrowpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_narrowpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -11,7 +11,7 @@ md5sum: 6443507ac66b9d3b64bc56b78328083e - name: idr test_idr_broadpeak - command: nextflow run tests/modules/idr -entry test_idr_broadpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_broadpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -23,7 +23,7 @@ md5sum: e6917133112b5cec135c182ffac19237 - name: idr test_idr_noprefix - command: nextflow run tests/modules/idr -entry test_idr_noprefix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_noprefix -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: diff --git a/tests/modules/imputeme/vcftoprs/main.nf b/tests/modules/imputeme/vcftoprs/main.nf index ff59ca5e..dccc06e0 100644 --- a/tests/modules/imputeme/vcftoprs/main.nf +++ b/tests/modules/imputeme/vcftoprs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' addParams( options: [:] ) +include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' workflow test_imputeme_vcftoprs { diff --git a/tests/modules/imputeme/vcftoprs/nextflow.config b/tests/modules/imputeme/vcftoprs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/imputeme/vcftoprs/test.yml 
b/tests/modules/imputeme/vcftoprs/test.yml index efb73769..e5152a03 100644 --- a/tests/modules/imputeme/vcftoprs/test.yml +++ b/tests/modules/imputeme/vcftoprs/test.yml @@ -1,5 +1,5 @@ - name: imputeme vcftoprs test_imputeme_vcftoprs - command: nextflow run tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c ./tests/config/nextflow.config -c ./tests/modules/imputeme/vcftoprs/nextflow.config tags: - imputeme - imputeme/vcftoprs diff --git a/tests/modules/iqtree/main.nf b/tests/modules/iqtree/main.nf index 977d7c0a..2d73bd52 100644 --- a/tests/modules/iqtree/main.nf +++ b/tests/modules/iqtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IQTREE } from '../../../modules/iqtree/main.nf' addParams( options: [:] ) +include { IQTREE } from '../../../modules/iqtree/main.nf' workflow test_iqtree { diff --git a/tests/modules/iqtree/nextflow.config b/tests/modules/iqtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/iqtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/iqtree/test.yml b/tests/modules/iqtree/test.yml index e40656a2..06de90d9 100644 --- a/tests/modules/iqtree/test.yml +++ b/tests/modules/iqtree/test.yml @@ -1,5 +1,5 @@ - name: iqtree test workflow - command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c ./tests/config/nextflow.config -c ./tests/modules/iqtree/nextflow.config tags: - iqtree files: diff --git a/tests/modules/ismapper/main.nf b/tests/modules/ismapper/main.nf index b28344dc..abb180f7 100644 --- a/tests/modules/ismapper/main.nf +++ b/tests/modules/ismapper/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
ISMAPPER } from '../../../modules/ismapper/main.nf' addParams( options: [:] ) +include { ISMAPPER } from '../../../modules/ismapper/main.nf' workflow test_ismapper { diff --git a/tests/modules/ismapper/nextflow.config b/tests/modules/ismapper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ismapper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ismapper/test.yml b/tests/modules/ismapper/test.yml index 0574b855..b4f64448 100644 --- a/tests/modules/ismapper/test.yml +++ b/tests/modules/ismapper/test.yml @@ -1,5 +1,5 @@ - name: ismapper test_ismapper - command: nextflow run tests/modules/ismapper -entry test_ismapper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ismapper -entry test_ismapper -c ./tests/config/nextflow.config -c ./tests/modules/ismapper/nextflow.config tags: - ismapper files: diff --git a/tests/modules/isoseq3/cluster/main.nf b/tests/modules/isoseq3/cluster/main.nf index 90a24c11..958b03a6 100644 --- a/tests/modules/isoseq3/cluster/main.nf +++ b/tests/modules/isoseq3/cluster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' addParams( options: [args: '--singletons --use-qvs --verbose'] ) +include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' workflow test_isoseq3_cluster { diff --git a/tests/modules/isoseq3/cluster/nextflow.config b/tests/modules/isoseq3/cluster/nextflow.config new file mode 100644 index 00000000..8bfeaebd --- /dev/null +++ b/tests/modules/isoseq3/cluster/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_CLUSTER { + ext.args = '--singletons --use-qvs --verbose' + } + +} diff --git 
a/tests/modules/isoseq3/cluster/test.yml b/tests/modules/isoseq3/cluster/test.yml index 58b20ae2..b1f12df7 100644 --- a/tests/modules/isoseq3/cluster/test.yml +++ b/tests/modules/isoseq3/cluster/test.yml @@ -1,5 +1,5 @@ - name: isoseq3 cluster test_isoseq3_cluster - command: nextflow run tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c ./tests/config/nextflow.config -c ./tests/modules/isoseq3/cluster/nextflow.config tags: - isoseq3 - isoseq3/cluster diff --git a/tests/modules/isoseq3/refine/main.nf b/tests/modules/isoseq3/refine/main.nf index 13736604..45dd1560 100644 --- a/tests/modules/isoseq3/refine/main.nf +++ b/tests/modules/isoseq3/refine/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' addParams( options: [suffix:'.refine'] ) +include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' workflow test_isoseq3_refine { diff --git a/tests/modules/isoseq3/refine/nextflow.config b/tests/modules/isoseq3/refine/nextflow.config new file mode 100644 index 00000000..88f1bdc4 --- /dev/null +++ b/tests/modules/isoseq3/refine/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_REFINE { + ext.suffix = '.refine' + } + +} diff --git a/tests/modules/isoseq3/refine/test.yml b/tests/modules/isoseq3/refine/test.yml index 2e7782d3..f2c63fda 100644 --- a/tests/modules/isoseq3/refine/test.yml +++ b/tests/modules/isoseq3/refine/test.yml @@ -1,5 +1,5 @@ - name: isoseq3 refine test_isoseq3_refine - command: nextflow run tests/modules/isoseq3/refine -entry test_isoseq3_refine -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/refine -entry test_isoseq3_refine -c ./tests/config/nextflow.config -c 
./tests/modules/isoseq3/refine/nextflow.config tags: - isoseq3 - isoseq3/refine diff --git a/tests/modules/ivar/consensus/main.nf b/tests/modules/ivar/consensus/main.nf index 5e0457b5..d0807984 100644 --- a/tests/modules/ivar/consensus/main.nf +++ b/tests/modules/ivar/consensus/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 params.save_mpileup = true -include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' addParams( [ options: [args2: '-aa -A -d 0 -Q 0'] ] ) +include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' workflow test_ivar_consensus { input = [ [ id:'test'], diff --git a/tests/modules/ivar/consensus/nextflow.config b/tests/modules/ivar/consensus/nextflow.config new file mode 100644 index 00000000..7407619a --- /dev/null +++ b/tests/modules/ivar/consensus/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: IVAR_CONSENSUS { + ext.args2 = '-aa -A -d 0 -Q 0' + } + +} diff --git a/tests/modules/ivar/consensus/test.yml b/tests/modules/ivar/consensus/test.yml index 071fdc98..caaa640f 100644 --- a/tests/modules/ivar/consensus/test.yml +++ b/tests/modules/ivar/consensus/test.yml @@ -1,5 +1,5 @@ - name: ivar consensus - command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c ./tests/config/nextflow.config -c ./tests/modules/ivar/consensus/nextflow.config tags: - ivar - ivar/consensus diff --git a/tests/modules/ivar/trim/main.nf b/tests/modules/ivar/trim/main.nf index 05b390b0..15d0e739 100644 --- a/tests/modules/ivar/trim/main.nf +++ b/tests/modules/ivar/trim/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' addParams([:]) +include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' workflow 
test_ivar_trim { input = [ [ id:'test'], diff --git a/tests/modules/ivar/trim/nextflow.config b/tests/modules/ivar/trim/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ivar/trim/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ivar/trim/test.yml b/tests/modules/ivar/trim/test.yml index f2f46676..0be18ba8 100644 --- a/tests/modules/ivar/trim/test.yml +++ b/tests/modules/ivar/trim/test.yml @@ -1,5 +1,5 @@ - name: ivar trim - command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c ./tests/config/nextflow.config -c ./tests/modules/ivar/trim/nextflow.config tags: - ivar - ivar/trim diff --git a/tests/modules/ivar/variants/main.nf b/tests/modules/ivar/variants/main.nf index 5358e785..f603b5e5 100644 --- a/tests/modules/ivar/variants/main.nf +++ b/tests/modules/ivar/variants/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' addParams([:]) +include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' workflow test_ivar_variants_no_gff_no_mpileup { params.gff = false diff --git a/tests/modules/ivar/variants/nextflow.config b/tests/modules/ivar/variants/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ivar/variants/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ivar/variants/test.yml b/tests/modules/ivar/variants/test.yml index a8be12a8..00e6e2c0 100644 --- a/tests/modules/ivar/variants/test.yml +++ b/tests/modules/ivar/variants/test.yml @@ -1,5 +1,5 @@ - name: ivar variants no gff no mpileup - command: nextflow run 
./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_no_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_no_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -8,7 +8,7 @@ md5sum: 728f1430f2402861396d9953465ac706 - name: ivar variants no gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c tests/config/nextflow.config --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c ./tests/config/nextflow.config --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -19,7 +19,7 @@ md5sum: 56c4cd5a4ecb7d6364878818f46ae256 - name: ivar variants with gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c ./tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants diff --git a/tests/modules/jupyternotebook/main.nf b/tests/modules/jupyternotebook/main.nf index c1da7e11..1db9d812 100644 --- a/tests/modules/jupyternotebook/main.nf +++ b/tests/modules/jupyternotebook/main.nf @@ -2,15 +2,9 @@ nextflow.enable.dsl = 2 -include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' addParams( - options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' 
addParams( - options: [:] -) +include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' workflow test_jupyternotebook { diff --git a/tests/modules/jupyternotebook/nextflow.config b/tests/modules/jupyternotebook/nextflow.config new file mode 100644 index 00000000..6066b2b8 --- /dev/null +++ b/tests/modules/jupyternotebook/nextflow.config @@ -0,0 +1,19 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: JUPYTERNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: JUPYTERNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + + withName: JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB { + ext = ['parametrize': true] + } + +} diff --git a/tests/modules/jupyternotebook/test.yml b/tests/modules/jupyternotebook/test.yml index dd4f1175..31fdfdbb 100644 --- a/tests/modules/jupyternotebook/test.yml +++ b/tests/modules/jupyternotebook/test.yml @@ -1,5 +1,5 @@ - name: jupyternotebook test_jupyternotebook - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -8,7 +8,7 @@ - "n_iter = 10" - name: jupyternotebook test_jupyternotebook_parametrize - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c 
./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -19,7 +19,7 @@ - "n_iter = 12" - name: jupyternotebook test_jupyternotebook_parametrize_ipynb - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: diff --git a/tests/modules/kallisto/index/main.nf b/tests/modules/kallisto/index/main.nf index 7c6078f8..8ecd6d52 100644 --- a/tests/modules/kallisto/index/main.nf +++ b/tests/modules/kallisto/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' addParams( options: [:] ) +include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' workflow test_kallisto_index { diff --git a/tests/modules/kallisto/index/nextflow.config b/tests/modules/kallisto/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallisto/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallisto/index/test.yml b/tests/modules/kallisto/index/test.yml index b9dd23ad..90a06325 100644 --- a/tests/modules/kallisto/index/test.yml +++ b/tests/modules/kallisto/index/test.yml @@ -1,5 +1,5 @@ - name: kallisto index test_kallisto_index - command: nextflow run tests/modules/kallisto/index -entry test_kallisto_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallisto/index -entry test_kallisto_index -c ./tests/config/nextflow.config -c ./tests/modules/kallisto/index/nextflow.config tags: - kallisto - kallisto/index diff --git 
a/tests/modules/kallistobustools/count/main.nf b/tests/modules/kallistobustools/count/main.nf index 9172ddfc..6e6be03d 100644 --- a/tests/modules/kallistobustools/count/main.nf +++ b/tests/modules/kallistobustools/count/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger -m 1"] ) +include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' workflow test_kallistobustools_count { diff --git a/tests/modules/kallistobustools/count/nextflow.config b/tests/modules/kallistobustools/count/nextflow.config new file mode 100644 index 00000000..eb4e20bd --- /dev/null +++ b/tests/modules/kallistobustools/count/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KALLISTOBUSTOOLS_COUNT { + ext.args = '--cellranger -m 1' + } + +} diff --git a/tests/modules/kallistobustools/count/test.yml b/tests/modules/kallistobustools/count/test.yml index 766d5b57..664e9fa6 100644 --- a/tests/modules/kallistobustools/count/test.yml +++ b/tests/modules/kallistobustools/count/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools count test_kallistobustools_count - command: nextflow run tests/modules/kallistobustools/count -entry test_kallistobustools_count -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/count -entry test_kallistobustools_count -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/count/nextflow.config tags: - kallistobustools/count - kallistobustools diff --git a/tests/modules/kallistobustools/ref/main.nf b/tests/modules/kallistobustools/ref/main.nf index 31b36d0d..09ea68ea 100644 --- a/tests/modules/kallistobustools/ref/main.nf +++ b/tests/modules/kallistobustools/ref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_REF } from 
'../../../../modules/kallistobustools/ref/main.nf' addParams( options: [:] ) +include { KALLISTOBUSTOOLS_REF } from '../../../../modules/kallistobustools/ref/main.nf' workflow test_kallistobustools_ref_standard { diff --git a/tests/modules/kallistobustools/ref/nextflow.config b/tests/modules/kallistobustools/ref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallistobustools/ref/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallistobustools/ref/test.yml b/tests/modules/kallistobustools/ref/test.yml index 54954085..1e8fd6c4 100644 --- a/tests/modules/kallistobustools/ref/test.yml +++ b/tests/modules/kallistobustools/ref/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools ref test_kallistobustools_ref_standard - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -9,7 +9,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_lamanno - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -22,7 +22,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_nucleus - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools diff --git a/tests/modules/khmer/normalizebymedian/main.nf b/tests/modules/khmer/normalizebymedian/main.nf index 3a3b348c..c439c40f 100644 --- a/tests/modules/khmer/normalizebymedian/main.nf +++ b/tests/modules/khmer/normalizebymedian/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [args: '-C 20 -k 32'] ) +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' +include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' +include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' workflow test_khmer_normalizebymedian_only_pe { diff --git a/tests/modules/khmer/normalizebymedian/nextflow.config b/tests/modules/khmer/normalizebymedian/nextflow.config new file mode 100644 index 00000000..279a972a --- /dev/null +++ b/tests/modules/khmer/normalizebymedian/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KHMER_NORMALIZEBYMEDIAN_ARGS { + ext.args = '-C 20 -k 32' + } + +} diff --git a/tests/modules/khmer/normalizebymedian/test.yml b/tests/modules/khmer/normalizebymedian/test.yml index a914a8ef..0e61588f 100644 --- a/tests/modules/khmer/normalizebymedian/test.yml +++ b/tests/modules/khmer/normalizebymedian/test.yml 
@@ -1,6 +1,6 @@ # nf-core modules create-test-yml khmer/normalizebymedian - name: khmer normalizebymedian only pe reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -10,7 +10,7 @@ #md5sum: 75e05f2e80cf4bd0b534d4b73f7c059c - name: khmer normalizebymedian only se reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -18,7 +18,7 @@ - path: output/khmer/only_se.fastq.gz - name: khmer normalizebymedian mixed reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -26,7 +26,7 @@ - path: output/khmer/mixed.fastq.gz - name: khmer normalizebymedian multiple pe reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -34,7 +34,7 @@ - path: 
output/khmer/multiple_pe.fastq.gz - name: khmer normalizebymedian args - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian diff --git a/tests/modules/kleborate/main.nf b/tests/modules/kleborate/main.nf index f846e642..bce31225 100644 --- a/tests/modules/kleborate/main.nf +++ b/tests/modules/kleborate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KLEBORATE } from '../../../modules/kleborate/main.nf' addParams( options: [:] ) +include { KLEBORATE } from '../../../modules/kleborate/main.nf' workflow test_kleborate { diff --git a/tests/modules/kleborate/nextflow.config b/tests/modules/kleborate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kleborate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kleborate/test.yml b/tests/modules/kleborate/test.yml index 1bee4708..c7b25778 100644 --- a/tests/modules/kleborate/test.yml +++ b/tests/modules/kleborate/test.yml @@ -1,5 +1,5 @@ - name: kleborate - command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c ./tests/config/nextflow.config -c ./tests/modules/kleborate/nextflow.config tags: - kleborate files: diff --git a/tests/modules/kraken2/kraken2/main.nf b/tests/modules/kraken2/kraken2/main.nf index e5638ec5..12399e9e 100644 --- a/tests/modules/kraken2/kraken2/main.nf +++ b/tests/modules/kraken2/kraken2/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from 
'../../../../modules/untar/main.nf' addParams( options: [:] ) -include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' workflow test_kraken2_kraken2_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/kraken2/kraken2/nextflow.config b/tests/modules/kraken2/kraken2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kraken2/kraken2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kraken2/kraken2/test.yml b/tests/modules/kraken2/kraken2/test.yml index 688fb34c..1ec413bf 100644 --- a/tests/modules/kraken2/kraken2/test.yml +++ b/tests/modules/kraken2/kraken2/test.yml @@ -1,5 +1,5 @@ - name: kraken2 kraken2 single-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 @@ -12,7 +12,7 @@ md5sum: 4227755fe40478b8d7dc8634b489761e - name: kraken2 kraken2 paired-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 diff --git a/tests/modules/krona/kronadb/main.nf b/tests/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ed955854 --- /dev/null +++ b/tests/modules/krona/kronadb/main.nf @@ -0,0 
+1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KRONADB } from '../../../../modules/krona/kronadb/main.nf' + +workflow test_krona_kronadb { + KRONA_KRONADB ( ) +} diff --git a/tests/modules/krona/kronadb/nextflow.config b/tests/modules/krona/kronadb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/kronadb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/krona/kronadb/test.yml b/tests/modules/krona/kronadb/test.yml new file mode 100644 index 00000000..1d61640f --- /dev/null +++ b/tests/modules/krona/kronadb/test.yml @@ -0,0 +1,7 @@ +- name: krona kronadb test_krona_kronadb + command: nextflow run ./tests/modules/krona/kronadb -entry test_krona_kronadb -c ./tests/config/nextflow.config -c ./tests/modules/krona/kronadb/nextflow.config + tags: + - krona + - krona/kronadb + files: + - path: output/krona/taxonomy/taxonomy.tab diff --git a/tests/modules/krona/ktimporttaxonomy/main.nf b/tests/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..a23e6fcb --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KTIMPORTTAXONOMY } from '../../../../modules/krona/ktimporttaxonomy/main.nf' + +workflow test_krona_ktimporttaxonomy { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ] + taxonomy = file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + + KRONA_KTIMPORTTAXONOMY ( input, taxonomy ) +} diff --git a/tests/modules/krona/ktimporttaxonomy/nextflow.config b/tests/modules/krona/ktimporttaxonomy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/nextflow.config @@ -0,0 +1,5 @@ 
+process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/krona/ktimporttaxonomy/test.yml b/tests/modules/krona/ktimporttaxonomy/test.yml new file mode 100644 index 00000000..b7748980 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/test.yml @@ -0,0 +1,9 @@ +- name: krona ktimporttaxonomy test_krona_ktimporttaxonomy + command: nextflow run ./tests/modules/krona/ktimporttaxonomy -entry test_krona_ktimporttaxonomy -c ./tests/config/nextflow.config -c ./tests/modules/krona/ktimporttaxonomy/nextflow.config + tags: + - krona/ktimporttaxonomy + - krona + files: + - path: output/krona/taxonomy.krona.html + contains: + - "DOCTYPE html PUBLIC" diff --git a/tests/modules/kronatools/kronadb/main.nf b/tests/modules/kronatools/kronadb/main.nf deleted file mode 100644 index 90b6e30c..00000000 --- a/tests/modules/kronatools/kronadb/main.nf +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { KRONATOOLS_KRONADB } from '../../../../modules/kronatools/kronadb/main.nf' addParams( options: [:] ) - -workflow test_kronatools_kronadb { - KRONATOOLS_KRONADB ( ) -} diff --git a/tests/modules/kronatools/kronadb/test.yml b/tests/modules/kronatools/kronadb/test.yml deleted file mode 100644 index 3f346a9d..00000000 --- a/tests/modules/kronatools/kronadb/test.yml +++ /dev/null @@ -1,7 +0,0 @@ -- name: kronatools kronadb test_kronatools_kronadb - command: nextflow run tests/modules/kronatools/kronadb -entry test_kronatools_kronadb -c tests/config/nextflow.config - tags: - - kronatools - - kronatools/kronadb - files: - - path: output/kronatools/taxonomy/taxonomy.tab diff --git a/tests/modules/kronatools/ktimporttaxonomy/main.nf b/tests/modules/kronatools/ktimporttaxonomy/main.nf deleted file mode 100644 index d7b08a2f..00000000 --- a/tests/modules/kronatools/ktimporttaxonomy/main.nf +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env nextflow - 
-nextflow.enable.dsl = 2 - -include { KRONATOOLS_KTIMPORTTAXONOMY } from '../../../../modules/kronatools/ktimporttaxonomy/main.nf' addParams( options: [:] ) - -workflow test_kronatools_ktimporttaxonomy { - - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['generic']['txt']['hello'], checkIfExists: true) ] - - taxonomy = [ file(params.test_data['generic']['txt']['hello'] , checkIfExists: true) ] - - KRONATOOLS_KTIMPORTTAXONOMY ( input, taxonomy ) -} diff --git a/tests/modules/kronatools/ktimporttaxonomy/test.yml b/tests/modules/kronatools/ktimporttaxonomy/test.yml deleted file mode 100644 index 15882b2e..00000000 --- a/tests/modules/kronatools/ktimporttaxonomy/test.yml +++ /dev/null @@ -1,9 +0,0 @@ -- name: kronatools ktimporttaxonomy test_kronatools_ktimporttaxonomy - command: nextflow run tests/modules/kronatools/ktimporttaxonomy -entry test_kronatools_ktimporttaxonomy -c tests/config/nextflow.config - tags: - - kronatools/ktimporttaxonomy - - kronatools - files: - - path: output/kronatools/taxonomy.krona.html - contains: - - "DOCTYPE html PUBLIC" diff --git a/tests/modules/last/dotplot/main.nf b/tests/modules/last/dotplot/main.nf index b92ed270..3353821d 100644 --- a/tests/modules/last/dotplot/main.nf +++ b/tests/modules/last/dotplot/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' addParams( options: [:] ) +include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' workflow test_last_dotplot { diff --git a/tests/modules/last/dotplot/nextflow.config b/tests/modules/last/dotplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/dotplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/dotplot/test.yml b/tests/modules/last/dotplot/test.yml index 177e377b..c2a9910f 
100644 --- a/tests/modules/last/dotplot/test.yml +++ b/tests/modules/last/dotplot/test.yml @@ -1,5 +1,5 @@ - name: last dotplot test_last_dotplot - command: nextflow run tests/modules/last/dotplot -entry test_last_dotplot -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/dotplot -entry test_last_dotplot -c ./tests/config/nextflow.config -c ./tests/modules/last/dotplot/nextflow.config tags: - last/dotplot - last diff --git a/tests/modules/last/lastal/main.nf b/tests/modules/last/lastal/main.nf index 262c8f5f..95c2f917 100644 --- a/tests/modules/last/lastal/main.nf +++ b/tests/modules/last/lastal/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' workflow test_last_lastal_with_dummy_param_file { diff --git a/tests/modules/last/lastal/nextflow.config b/tests/modules/last/lastal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/lastal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/lastal/test.yml b/tests/modules/last/lastal/test.yml index 48b0d223..f75e4ac5 100644 --- a/tests/modules/last/lastal/test.yml +++ b/tests/modules/last/lastal/test.yml @@ -1,5 +1,5 @@ - name: last lastal test_last_lastal_with_dummy_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal @@ 
-22,7 +22,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastal test_last_lastal_with_real_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal diff --git a/tests/modules/last/lastdb/main.nf b/tests/modules/last/lastdb/main.nf index 2f11bee4..d1c7b79a 100644 --- a/tests/modules/last/lastdb/main.nf +++ b/tests/modules/last/lastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' addParams( options: ['args': '-Q0'] ) +include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' workflow test_last_lastdb { diff --git a/tests/modules/last/lastdb/nextflow.config b/tests/modules/last/lastdb/nextflow.config new file mode 100644 index 00000000..9b8b9878 --- /dev/null +++ b/tests/modules/last/lastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_LASTDB { + ext.args = '-Q0' + } + +} diff --git a/tests/modules/last/lastdb/test.yml b/tests/modules/last/lastdb/test.yml index c69ecfac..ece44cf3 100644 --- a/tests/modules/last/lastdb/test.yml +++ b/tests/modules/last/lastdb/test.yml @@ -1,5 +1,5 @@ - name: last lastdb test_last_lastdb - command: nextflow run tests/modules/last/lastdb -entry test_last_lastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb -c ./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last @@ -20,7 +20,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastdb test_last_lastdb_gzipped_input - command: nextflow run tests/modules/last/lastdb -entry 
test_last_lastdb_gzipped_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb_gzipped_input -c ./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last diff --git a/tests/modules/last/mafconvert/main.nf b/tests/modules/last/mafconvert/main.nf index 7864c68a..c87f6e6a 100644 --- a/tests/modules/last/mafconvert/main.nf +++ b/tests/modules/last/mafconvert/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' addParams( options: [:] ) +include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' workflow test_last_mafconvert { diff --git a/tests/modules/last/mafconvert/nextflow.config b/tests/modules/last/mafconvert/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafconvert/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafconvert/test.yml b/tests/modules/last/mafconvert/test.yml index 35c65ce9..86a80f20 100644 --- a/tests/modules/last/mafconvert/test.yml +++ b/tests/modules/last/mafconvert/test.yml @@ -1,5 +1,5 @@ - name: last mafconvert test_last_mafconvert - command: nextflow run tests/modules/last/mafconvert -entry test_last_mafconvert -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafconvert -entry test_last_mafconvert -c ./tests/config/nextflow.config -c ./tests/modules/last/mafconvert/nextflow.config tags: - last/mafconvert - last diff --git a/tests/modules/last/mafswap/main.nf b/tests/modules/last/mafswap/main.nf index 3bb72d63..5cc94932 100644 --- a/tests/modules/last/mafswap/main.nf +++ b/tests/modules/last/mafswap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' addParams( 
options: [:] ) +include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' workflow test_last_mafswap { diff --git a/tests/modules/last/mafswap/nextflow.config b/tests/modules/last/mafswap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafswap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafswap/test.yml b/tests/modules/last/mafswap/test.yml index c7e3778d..a0865e00 100644 --- a/tests/modules/last/mafswap/test.yml +++ b/tests/modules/last/mafswap/test.yml @@ -1,5 +1,5 @@ - name: last mafswap test_last_mafswap - command: nextflow run tests/modules/last/mafswap -entry test_last_mafswap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafswap -entry test_last_mafswap -c ./tests/config/nextflow.config -c ./tests/modules/last/mafswap/nextflow.config tags: - last - last/mafswap diff --git a/tests/modules/last/postmask/main.nf b/tests/modules/last/postmask/main.nf index c30ac806..9bbb10e9 100644 --- a/tests/modules/last/postmask/main.nf +++ b/tests/modules/last/postmask/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' addParams( options: [suffix:'.postmask'] ) +include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' workflow test_last_postmask { diff --git a/tests/modules/last/postmask/nextflow.config b/tests/modules/last/postmask/nextflow.config new file mode 100644 index 00000000..dc021264 --- /dev/null +++ b/tests/modules/last/postmask/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_POSTMASK { + ext.suffix = '.postmask' + } + +} diff --git a/tests/modules/last/postmask/test.yml b/tests/modules/last/postmask/test.yml index 
57aea822..81ae7f73 100644 --- a/tests/modules/last/postmask/test.yml +++ b/tests/modules/last/postmask/test.yml @@ -1,5 +1,5 @@ - name: last postmask test_last_postmask - command: nextflow run tests/modules/last/postmask -entry test_last_postmask -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/postmask -entry test_last_postmask -c ./tests/config/nextflow.config -c ./tests/modules/last/postmask/nextflow.config tags: - last - last/postmask diff --git a/tests/modules/last/split/main.nf b/tests/modules/last/split/main.nf index 19d899ab..f4ece4f2 100644 --- a/tests/modules/last/split/main.nf +++ b/tests/modules/last/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' addParams( options: ['suffix':'.split'] ) +include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' workflow test_last_split { diff --git a/tests/modules/last/split/nextflow.config b/tests/modules/last/split/nextflow.config new file mode 100644 index 00000000..8b31ca0f --- /dev/null +++ b/tests/modules/last/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_SPLIT { + ext.suffix = '.split' + } + +} diff --git a/tests/modules/last/split/test.yml b/tests/modules/last/split/test.yml index d57d7477..57eb345f 100644 --- a/tests/modules/last/split/test.yml +++ b/tests/modules/last/split/test.yml @@ -1,5 +1,5 @@ - name: last split test_last_split - command: nextflow run tests/modules/last/split -entry test_last_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/split -entry test_last_split -c ./tests/config/nextflow.config -c ./tests/modules/last/split/nextflow.config tags: - last - last/split diff --git a/tests/modules/last/train/main.nf b/tests/modules/last/train/main.nf index 26e318c3..0f280a82 100644 --- a/tests/modules/last/train/main.nf +++ 
b/tests/modules/last/train/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' workflow test_last_train { diff --git a/tests/modules/last/train/nextflow.config b/tests/modules/last/train/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/train/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/train/test.yml b/tests/modules/last/train/test.yml index 18eec951..8641600b 100644 --- a/tests/modules/last/train/test.yml +++ b/tests/modules/last/train/test.yml @@ -1,5 +1,5 @@ - name: last train test_last_train - command: nextflow run tests/modules/last/train -entry test_last_train -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/train -entry test_last_train -c ./tests/config/nextflow.config -c ./tests/modules/last/train/nextflow.config tags: - last/train - last diff --git a/tests/modules/leehom/main.nf b/tests/modules/leehom/main.nf index 2fe6f12f..1615d2e1 100644 --- a/tests/modules/leehom/main.nf +++ b/tests/modules/leehom/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { LEEHOM } from '../../../modules/leehom/main.nf' addParams( options: [:] ) -include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: [args: "-f4 -b"] ) +include { LEEHOM } from '../../../modules/leehom/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' workflow test_leehom_bam { diff --git a/tests/modules/leehom/nextflow.config b/tests/modules/leehom/nextflow.config new file mode 100644 index 00000000..25df48cd --- 
/dev/null +++ b/tests/modules/leehom/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.args = '-f4 -b' + } + +} diff --git a/tests/modules/leehom/test.yml b/tests/modules/leehom/test.yml index 8a9f083e..98257492 100644 --- a/tests/modules/leehom/test.yml +++ b/tests/modules/leehom/test.yml @@ -1,17 +1,15 @@ - name: leehom test_leehom_bam - command: nextflow run tests/modules/leehom -entry test_leehom_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_bam -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: - path: output/leehom/test.bam - md5sum: 19a1bf95714523868791f1d4d3aaee73 + - path: output/samtools/test.bam - path: output/leehom/test.log md5sum: d1f5da273eb69f41babda510797c7671 - - path: output/samtools/test.bam - md5sum: 25d13b3b31b147bb3836dea9932c38dd - name: leehom test_leehom_se_fq - command: nextflow run tests/modules/leehom -entry test_leehom_se_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_se_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: @@ -23,7 +21,7 @@ md5sum: 59aa280cb72dfbea05ba913cb89db143 - name: leehom test_leehom_pe_fq - command: nextflow run tests/modules/leehom -entry test_leehom_pe_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/leehom -entry test_leehom_pe_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config tags: - leehom files: diff --git a/tests/modules/lima/main.nf b/tests/modules/lima/main.nf index df4b2be2..7501def9 100644 --- a/tests/modules/lima/main.nf +++ b/tests/modules/lima/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LIMA } from '../../../modules/lima/main.nf' addParams( options: [args: '--isoseq --peek-guess', suffix: ".fl"] 
) +include { LIMA } from '../../../modules/lima/main.nf' workflow test_lima_bam { diff --git a/tests/modules/lima/nextflow.config b/tests/modules/lima/nextflow.config new file mode 100644 index 00000000..5091b034 --- /dev/null +++ b/tests/modules/lima/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LIMA { + ext.args = '--isoseq --peek-guess' + ext.suffix = '.fl' + } + +} diff --git a/tests/modules/lima/test.yml b/tests/modules/lima/test.yml index 1ff860d9..8d927624 100644 --- a/tests/modules/lima/test.yml +++ b/tests/modules/lima/test.yml @@ -1,5 +1,5 @@ - name: lima test_lima_bam - command: nextflow run tests/modules/lima -entry test_lima_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_bam -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -23,7 +23,7 @@ md5sum: bcbcaaaca418bdeb91141c81715ca420 - name: lima test_lima_fa - command: nextflow run tests/modules/lima -entry test_lima_fa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -39,7 +39,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima test_lima_fa_gz - command: nextflow run tests/modules/lima -entry test_lima_fa_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -55,7 +55,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima test_lima_fq - command: nextflow run tests/modules/lima -entry test_lima_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -73,7 +73,7 
@@ md5sum: e91d3c386aaf4effa63f33ee2eb7da2a - name: lima test_lima_fq_gz - command: nextflow run tests/modules/lima -entry test_lima_fq_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: diff --git a/tests/modules/lissero/main.nf b/tests/modules/lissero/main.nf index e653bd76..339576c3 100644 --- a/tests/modules/lissero/main.nf +++ b/tests/modules/lissero/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LISSERO } from '../../../modules/lissero/main.nf' addParams( options: [:] ) +include { LISSERO } from '../../../modules/lissero/main.nf' workflow test_lissero { diff --git a/tests/modules/lissero/nextflow.config b/tests/modules/lissero/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lissero/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lissero/test.yml b/tests/modules/lissero/test.yml index 19e79623..8dd7339e 100644 --- a/tests/modules/lissero/test.yml +++ b/tests/modules/lissero/test.yml @@ -1,5 +1,5 @@ - name: lissero test_lissero - command: nextflow run tests/modules/lissero -entry test_lissero -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lissero -entry test_lissero -c ./tests/config/nextflow.config -c ./tests/modules/lissero/nextflow.config tags: - lissero files: diff --git a/tests/modules/lofreq/call/main.nf b/tests/modules/lofreq/call/main.nf index 2c306fd1..70da4ea5 100644 --- a/tests/modules/lofreq/call/main.nf +++ b/tests/modules/lofreq/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' addParams( options: [:] ) +include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' workflow test_lofreq_call { diff --git 
a/tests/modules/lofreq/call/nextflow.config b/tests/modules/lofreq/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/call/test.yml b/tests/modules/lofreq/call/test.yml index 88700bfe..b9f42542 100644 --- a/tests/modules/lofreq/call/test.yml +++ b/tests/modules/lofreq/call/test.yml @@ -1,5 +1,5 @@ - name: lofreq call test_lofreq_call - command: nextflow run tests/modules/lofreq/call -entry test_lofreq_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/call -entry test_lofreq_call -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/call/nextflow.config tags: - lofreq - lofreq/call diff --git a/tests/modules/lofreq/callparallel/main.nf b/tests/modules/lofreq/callparallel/main.nf index 724bbff1..24ab2db3 100644 --- a/tests/modules/lofreq/callparallel/main.nf +++ b/tests/modules/lofreq/callparallel/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' addParams( options: [:] ) +include { LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' workflow test_lofreq_callparallel { diff --git a/tests/modules/lofreq/callparallel/nextflow.config b/tests/modules/lofreq/callparallel/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/callparallel/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/callparallel/test.yml b/tests/modules/lofreq/callparallel/test.yml index e09f68c3..db281012 100644 --- a/tests/modules/lofreq/callparallel/test.yml +++ b/tests/modules/lofreq/callparallel/test.yml @@ -1,5 +1,5 @@ - 
name: lofreq callparallel test_lofreq_callparallel - command: nextflow run tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/callparallel/nextflow.config tags: - lofreq/callparallel - lofreq diff --git a/tests/modules/lofreq/filter/main.nf b/tests/modules/lofreq/filter/main.nf index c5dcea97..bd2a7f54 100644 --- a/tests/modules/lofreq/filter/main.nf +++ b/tests/modules/lofreq/filter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' addParams( options: [:] ) +include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' workflow test_lofreq_filter { diff --git a/tests/modules/lofreq/filter/nextflow.config b/tests/modules/lofreq/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/filter/test.yml b/tests/modules/lofreq/filter/test.yml index 4ee82654..d3ee3812 100644 --- a/tests/modules/lofreq/filter/test.yml +++ b/tests/modules/lofreq/filter/test.yml @@ -1,5 +1,5 @@ - name: lofreq filter test_lofreq_filter - command: nextflow run tests/modules/lofreq/filter -entry test_lofreq_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/filter -entry test_lofreq_filter -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/filter/nextflow.config tags: - lofreq - lofreq/filter diff --git a/tests/modules/lofreq/indelqual/main.nf b/tests/modules/lofreq/indelqual/main.nf index ba0493dd..71652ce1 100644 --- a/tests/modules/lofreq/indelqual/main.nf +++ b/tests/modules/lofreq/indelqual/main.nf @@ -3,7 +3,7 @@ 
nextflow.enable.dsl = 2 -include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' addParams( options: [ 'args': '--dindel', 'suffix':'.indelqual'] ) +include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' workflow test_lofreq_indelqual { diff --git a/tests/modules/lofreq/indelqual/nextflow.config b/tests/modules/lofreq/indelqual/nextflow.config new file mode 100644 index 00000000..b9ad2787 --- /dev/null +++ b/tests/modules/lofreq/indelqual/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LOFREQ_INDELQUAL { + ext.args = '--dindel' + ext.suffix = '.indelqual' + } + +} diff --git a/tests/modules/lofreq/indelqual/test.yml b/tests/modules/lofreq/indelqual/test.yml index f3e73297..6fffb523 100644 --- a/tests/modules/lofreq/indelqual/test.yml +++ b/tests/modules/lofreq/indelqual/test.yml @@ -1,5 +1,5 @@ - name: lofreq indelqual - command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/indelqual/nextflow.config tags: - lofreq - lofreq/indelqual diff --git a/tests/modules/macs2/callpeak/main.nf b/tests/modules/macs2/callpeak/main.nf index db598564..070469dd 100644 --- a/tests/modules/macs2/callpeak/main.nf +++ b/tests/modules/macs2/callpeak/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) -include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--qval 0.1"] ) -include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' addParams( options: ["args": "--format BED --qval 1 --nomodel 
--extsize 200"] ) +include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' workflow test_macs2_callpeak_bed { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/macs2/callpeak/nextflow.config b/tests/modules/macs2/callpeak/nextflow.config new file mode 100644 index 00000000..e3bd3f5d --- /dev/null +++ b/tests/modules/macs2/callpeak/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MACS2_CALLPEAK { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_CTRL { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_BED { + ext.args = '--format BED --qval 1 --nomodel --extsize 200' + } + +} diff --git a/tests/modules/macs2/callpeak/test.yml b/tests/modules/macs2/callpeak/test.yml index 424a9746..43c99140 100644 --- a/tests/modules/macs2/callpeak/test.yml +++ b/tests/modules/macs2/callpeak/test.yml @@ -1,5 +1,5 @@ - name: macs2 callpeak test_macs2_callpeak_bed - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak @@ -12,7 +12,7 @@ md5sum: d41d8cd98f00b204e9800998ecf8427e - name: macs2 callpeak test_macs2_callpeak - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak @@ -25,7 +25,7 @@ md5sum: 
26f0f97b6c14dbca129e947a58067c82 - name: macs2 callpeak test_macs2_callpeak_ctrl - command: nextflow run tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c tests/config/nextflow.config + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config tags: - macs2 - macs2/callpeak diff --git a/tests/modules/malt/build_test/main.nf b/tests/modules/malt/build_test/main.nf index b2f3eaf6..2542da0c 100644 --- a/tests/modules/malt/build_test/main.nf +++ b/tests/modules/malt/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' workflow test_malt_build { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/malt/build_test/nextflow.config b/tests/modules/malt/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/build_test/test.yml b/tests/modules/malt/build_test/test.yml index c3ed4b8f..c6694ad5 100644 --- a/tests/modules/malt/build_test/test.yml +++ b/tests/modules/malt/build_test/test.yml @@ -1,5 +1,5 @@ - name: malt build - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c ./tests/config/nextflow.config -c ./tests/modules/malt/build/nextflow.config tags: - malt - malt/build @@ -21,7 +21,7 
@@ - path: output/malt/malt_index/taxonomy.tre md5sum: bde26a1fff5c63d3046d3863607a1e70 - name: malt build gff - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c ./tests/config/nextflow.config -c ./tests/modules/malt/build/nextflow.config tags: - malt - malt/build diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf index 6292ca61..292a3fcf 100644 --- a/tests/modules/malt/run/main.nf +++ b/tests/modules/malt/run/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../../modules/malt/run/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../../modules/malt/run/main.nf' workflow test_malt_run { diff --git a/tests/modules/malt/run/nextflow.config b/tests/modules/malt/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml index 0c245f2f..5b0742e4 100644 --- a/tests/modules/malt/run/test.yml +++ b/tests/modules/malt/run/test.yml @@ -1,5 +1,5 @@ - name: malt run - command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c ./tests/config/nextflow.config -c ./tests/modules/malt/run/nextflow.config tags: - malt - malt/run diff --git 
a/tests/modules/maltextract/main.nf b/tests/modules/maltextract/main.nf index d18923ca..8e0a2241 100644 --- a/tests/modules/maltextract/main.nf +++ b/tests/modules/maltextract/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' workflow test_maltextract { diff --git a/tests/modules/maltextract/nextflow.config b/tests/modules/maltextract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maltextract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maltextract/test.yml b/tests/modules/maltextract/test.yml index 87bf0182..2440c100 100644 --- a/tests/modules/maltextract/test.yml +++ b/tests/modules/maltextract/test.yml @@ -1,5 +1,5 @@ - name: maltextract - command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c ./tests/config/nextflow.config -c ./tests/modules/maltextract/nextflow.config tags: - maltextract files: diff --git 
a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index df996464..f8adedb0 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' addParams( options: [:] ) +include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' workflow test_manta_germline { input = [ diff --git a/tests/modules/manta/germline/nextflow.config b/tests/modules/manta/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/germline/test.yml b/tests/modules/manta/germline/test.yml index b4086d76..c6ead9eb 100644 --- a/tests/modules/manta/germline/test.yml +++ b/tests/modules/manta/germline/test.yml @@ -1,5 +1,5 @@ - name: manta germline - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline @@ -11,7 +11,7 @@ - path: output/manta/test.diploid_sv.vcf.gz - path: output/manta/test.diploid_sv.vcf.gz.tbi - name: manta germline target bed - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline diff --git a/tests/modules/manta/somatic/main.nf b/tests/modules/manta/somatic/main.nf index 553735c9..7da41bea 100644 --- 
a/tests/modules/manta/somatic/main.nf +++ b/tests/modules/manta/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' addParams( options: [:] ) +include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' workflow test_manta_somatic { diff --git a/tests/modules/manta/somatic/nextflow.config b/tests/modules/manta/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/somatic/test.yml b/tests/modules/manta/somatic/test.yml index 72f0953d..d701a210 100644 --- a/tests/modules/manta/somatic/test.yml +++ b/tests/modules/manta/somatic/test.yml @@ -1,5 +1,5 @@ - name: manta somatic test_manta_somatic - command: nextflow run tests/modules/manta/somatic -entry test_manta_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/somatic -entry test_manta_somatic -c ./tests/config/nextflow.config -c ./tests/modules/manta/somatic/nextflow.config tags: - manta/somatic - manta diff --git a/tests/modules/manta/tumoronly/main.nf b/tests/modules/manta/tumoronly/main.nf index 436ab781..be0d3dbb 100644 --- a/tests/modules/manta/tumoronly/main.nf +++ b/tests/modules/manta/tumoronly/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' addParams( options: [:] ) +include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' workflow test_manta_tumoronly { input = [ diff --git a/tests/modules/manta/tumoronly/nextflow.config b/tests/modules/manta/tumoronly/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/tumoronly/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/tumoronly/test.yml b/tests/modules/manta/tumoronly/test.yml index 13f2cde1..c56e23fa 100644 --- a/tests/modules/manta/tumoronly/test.yml +++ b/tests/modules/manta/tumoronly/test.yml @@ -1,5 +1,5 @@ - name: manta tumoronly - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly @@ -11,7 +11,7 @@ - path: output/manta/test.tumor_sv.vcf.gz - path: output/manta/test.tumor_sv.vcf.gz.tbi - name: manta tumoronly target bed - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly diff --git a/tests/modules/mapdamage2/main.nf b/tests/modules/mapdamage2/main.nf index a4a0eb02..b7e4d23b 100644 --- a/tests/modules/mapdamage2/main.nf +++ b/tests/modules/mapdamage2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' addParams( options: [:] ) +include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' workflow test_mapdamage2 { diff --git a/tests/modules/mapdamage2/nextflow.config b/tests/modules/mapdamage2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mapdamage2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mapdamage2/test.yml b/tests/modules/mapdamage2/test.yml index 
657f59b5..96c8b2da 100644 --- a/tests/modules/mapdamage2/test.yml +++ b/tests/modules/mapdamage2/test.yml @@ -1,5 +1,5 @@ - name: mapdamage2 test_mapdamage2 - command: nextflow run tests/modules/mapdamage2 -entry test_mapdamage2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mapdamage2 -entry test_mapdamage2 -c ./tests/config/nextflow.config -c ./tests/modules/mapdamage2/nextflow.config tags: - mapdamage2 files: diff --git a/tests/modules/mash/sketch/main.nf b/tests/modules/mash/sketch/main.nf index da72d1e3..cec2035b 100644 --- a/tests/modules/mash/sketch/main.nf +++ b/tests/modules/mash/sketch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' addParams( options: [:] ) +include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' workflow test_mash_sketch { diff --git a/tests/modules/mash/sketch/nextflow.config b/tests/modules/mash/sketch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mash/sketch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mash/sketch/test.yml b/tests/modules/mash/sketch/test.yml index 78f4598b..d5039956 100644 --- a/tests/modules/mash/sketch/test.yml +++ b/tests/modules/mash/sketch/test.yml @@ -1,5 +1,5 @@ - name: mash sketch - command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c ./tests/config/nextflow.config -c ./tests/modules/mash/sketch/nextflow.config tags: - mash/sketch files: diff --git a/tests/modules/mashtree/main.nf b/tests/modules/mashtree/main.nf index 47a7c12a..07f5e561 100644 --- a/tests/modules/mashtree/main.nf +++ b/tests/modules/mashtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASHTREE } from 
'../../../modules/mashtree/main.nf' addParams( options: [:] ) +include { MASHTREE } from '../../../modules/mashtree/main.nf' workflow test_mashtree { diff --git a/tests/modules/mashtree/nextflow.config b/tests/modules/mashtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mashtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mashtree/test.yml b/tests/modules/mashtree/test.yml index 83ff6272..bea9638c 100644 --- a/tests/modules/mashtree/test.yml +++ b/tests/modules/mashtree/test.yml @@ -1,5 +1,5 @@ - name: mashtree test_mashtree - command: nextflow run tests/modules/mashtree -entry test_mashtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mashtree -entry test_mashtree -c ./tests/config/nextflow.config -c ./tests/modules/mashtree/nextflow.config tags: - mashtree files: diff --git a/tests/modules/maxbin2/main.nf b/tests/modules/maxbin2/main.nf index bede2c6a..3df417be 100644 --- a/tests/modules/maxbin2/main.nf +++ b/tests/modules/maxbin2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' addParams( options: [:] ) +include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' workflow test_maxbin2 { diff --git a/tests/modules/maxbin2/nextflow.config b/tests/modules/maxbin2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maxbin2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maxbin2/test.yml b/tests/modules/maxbin2/test.yml index 2721d17a..a8ba98f9 100644 --- a/tests/modules/maxbin2/test.yml +++ b/tests/modules/maxbin2/test.yml @@ -1,5 +1,5 @@ - name: maxbin2 - command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c ./tests/config/nextflow.config -c ./tests/modules/maxbin2/nextflow.config tags: - maxbin2 files: diff --git a/tests/modules/medaka/main.nf b/tests/modules/medaka/main.nf index 300e086b..75fc135b 100644 --- a/tests/modules/medaka/main.nf +++ b/tests/modules/medaka/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MEDAKA } from '../../../modules/medaka/main.nf' addParams( options: [suffix:'.polished.genome'] ) +include { MEDAKA } from '../../../modules/medaka/main.nf' workflow test_medaka { diff --git a/tests/modules/medaka/nextflow.config b/tests/modules/medaka/nextflow.config new file mode 100644 index 00000000..1f89be62 --- /dev/null +++ b/tests/modules/medaka/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MEDAKA { + ext.suffix = '.polished.genome' + } + +} diff --git a/tests/modules/medaka/test.yml b/tests/modules/medaka/test.yml index 9ce5521e..54146bdc 100644 --- a/tests/modules/medaka/test.yml +++ b/tests/modules/medaka/test.yml @@ -1,5 +1,5 @@ - name: medaka test_medaka - command: nextflow run ./tests/modules/medaka -entry test_medaka -c tests/config/nextflow.config + command: nextflow run ./tests/modules/medaka -entry test_medaka -c ./tests/config/nextflow.config -c ./tests/modules/medaka/nextflow.config tags: - medaka files: diff --git a/tests/modules/megahit/main.nf b/tests/modules/megahit/main.nf index dcf07cd6..88acf3e3 100644 --- a/tests/modules/megahit/main.nf +++ b/tests/modules/megahit/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MEGAHIT } from '../../../modules/megahit/main.nf' addParams( options: [:] ) +include { MEGAHIT } from '../../../modules/megahit/main.nf' workflow test_megahit { diff --git a/tests/modules/megahit/nextflow.config b/tests/modules/megahit/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/megahit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/megahit/test.yml b/tests/modules/megahit/test.yml index c390891b..2072ac12 100644 --- a/tests/modules/megahit/test.yml +++ b/tests/modules/megahit/test.yml @@ -1,5 +1,5 @@ - name: megahit - command: nextflow run ./tests/modules/megahit -entry test_megahit -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: @@ -31,7 +31,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: megahit_single - command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: diff --git a/tests/modules/meningotype/main.nf b/tests/modules/meningotype/main.nf index d660ec72..a2d0ff10 100644 --- a/tests/modules/meningotype/main.nf +++ b/tests/modules/meningotype/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' addParams( options: [:] ) +include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' workflow test_meningotype { diff --git a/tests/modules/meningotype/nextflow.config b/tests/modules/meningotype/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/meningotype/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/meningotype/test.yml b/tests/modules/meningotype/test.yml index 
c61e78a6..02ec8e1f 100644 --- a/tests/modules/meningotype/test.yml +++ b/tests/modules/meningotype/test.yml @@ -1,5 +1,5 @@ - name: meningotype test_meningotype - command: nextflow run tests/modules/meningotype -entry test_meningotype -c tests/config/nextflow.config + command: nextflow run ./tests/modules/meningotype -entry test_meningotype -c ./tests/config/nextflow.config -c ./tests/modules/meningotype/nextflow.config tags: - meningotype files: diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf index 2cfc2e2c..00309402 100644 --- a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' workflow test_metabat2_jgisummarizebamcontigdepths { diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml index d318c6d4..86c49d26 100644 --- a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml @@ -1,5 +1,5 @@ - name: metabat2 jgisummarizebamcontigdepths test_metabat2_jgisummarizebamcontigdepths - command: nextflow run 
tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config tags: - metabat2/jgisummarizebamcontigdepths - metabat2 diff --git a/tests/modules/metabat2/metabat2/main.nf b/tests/modules/metabat2/metabat2/main.nf index 3d01f194..0179e4c3 100644 --- a/tests/modules/metabat2/metabat2/main.nf +++ b/tests/modules/metabat2/metabat2/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' addParams( options: [args: '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2'] ) -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' addParams( options: [:] ) +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' workflow test_metabat2_no_depth { diff --git a/tests/modules/metabat2/metabat2/nextflow.config b/tests/modules/metabat2/metabat2/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/metabat2/metabat2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml index 7b3435b7..1a8660a7 100644 --- a/tests/modules/metabat2/metabat2/test.yml +++ b/tests/modules/metabat2/metabat2/test.yml @@ -1,5 +1,5 @@ - name: metabat2 
metabat2 test_metabat2_no_depth - command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config tags: - metabat2/metabat2 - metabat2 @@ -10,7 +10,7 @@ md5sum: ea77e8c4426d2337419905b57f1ec335 - name: metabat2 metabat2 test_metabat2_depth - command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config tags: - metabat2/metabat2 - metabat2 diff --git a/tests/modules/metaphlan3/main.nf b/tests/modules/metaphlan3/main.nf index 2d855683..3354d2d9 100644 --- a/tests/modules/metaphlan3/main.nf +++ b/tests/modules/metaphlan3/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) -include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: ['suffix': '.sam'] ) -include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' addParams( options: [ 'args':'--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' ] ) +include { UNTAR } from '../../../modules/untar/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' +include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' workflow test_metaphlan3_single_end { @@ -42,7 +42,7 @@ workflow test_metaphlan3_sam { UNTAR ( db ) - SAMTOOLS_VIEW ( input ) + SAMTOOLS_VIEW ( input, [] ) METAPHLAN3 ( SAMTOOLS_VIEW.out.bam, UNTAR.out.untar ) } diff --git a/tests/modules/metaphlan3/nextflow.config b/tests/modules/metaphlan3/nextflow.config new file mode 100644 index 00000000..2dde2212 --- /dev/null +++ 
b/tests/modules/metaphlan3/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.suffix = '.sam' + } + + withName: METAPHLAN3 { + ext.args = '--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' + } + +} diff --git a/tests/modules/metaphlan3/test.yml b/tests/modules/metaphlan3/test.yml index fbd5e70b..92e731d2 100644 --- a/tests/modules/metaphlan3/test.yml +++ b/tests/modules/metaphlan3/test.yml @@ -1,5 +1,5 @@ - name: metaphlan3 test_metaphlan3_single_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -30,7 +30,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_paired_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -61,7 +61,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_sam - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_sam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_sam -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -92,7 +92,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_fasta - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_fasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry 
test_metaphlan3_fasta -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: diff --git a/tests/modules/methyldackel/extract/main.nf b/tests/modules/methyldackel/extract/main.nf index 40e87b0b..92f92308 100644 --- a/tests/modules/methyldackel/extract/main.nf +++ b/tests/modules/methyldackel/extract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' workflow test_methyldackel_extract { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/extract/nextflow.config b/tests/modules/methyldackel/extract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/extract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/extract/test.yml b/tests/modules/methyldackel/extract/test.yml index 70c371d7..28f969f3 100644 --- a/tests/modules/methyldackel/extract/test.yml +++ b/tests/modules/methyldackel/extract/test.yml @@ -1,5 +1,5 @@ - name: methyldackel extract - command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/extract/nextflow.config tags: - methyldackel - methyldackel/extract diff --git a/tests/modules/methyldackel/mbias/main.nf b/tests/modules/methyldackel/mbias/main.nf index 318dd663..f304e22f 100644 --- a/tests/modules/methyldackel/mbias/main.nf +++ b/tests/modules/methyldackel/mbias/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_MBIAS } 
from '../../../../modules/methyldackel/mbias/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_MBIAS } from '../../../../modules/methyldackel/mbias/main.nf' workflow test_methyldackel_mbias { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/mbias/nextflow.config b/tests/modules/methyldackel/mbias/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/mbias/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/mbias/test.yml b/tests/modules/methyldackel/mbias/test.yml index 43074291..8bb23f24 100644 --- a/tests/modules/methyldackel/mbias/test.yml +++ b/tests/modules/methyldackel/mbias/test.yml @@ -1,5 +1,5 @@ - name: methyldackel mbias - command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/mbias/nextflow.config tags: - methyldackel - methyldackel/mbias diff --git a/tests/modules/minia/main.nf b/tests/modules/minia/main.nf index e23f5cc4..5be4d17f 100644 --- a/tests/modules/minia/main.nf +++ b/tests/modules/minia/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIA } from '../../../modules/minia/main.nf' addParams( options: [:] ) +include { MINIA } from '../../../modules/minia/main.nf' workflow test_minia { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/minia/nextflow.config b/tests/modules/minia/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minia/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/minia/test.yml b/tests/modules/minia/test.yml index 6836f51d..78b84f37 100644 --- a/tests/modules/minia/test.yml +++ b/tests/modules/minia/test.yml @@ -1,5 +1,5 @@ - name: minia - command: nextflow run tests/modules/minia -entry test_minia -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minia -entry test_minia -c ./tests/config/nextflow.config -c ./tests/modules/minia/nextflow.config tags: - minia files: diff --git a/tests/modules/miniasm/main.nf b/tests/modules/miniasm/main.nf index f3d23d56..949660ac 100644 --- a/tests/modules/miniasm/main.nf +++ b/tests/modules/miniasm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIASM } from '../../../modules/miniasm/main.nf' addParams( options: [suffix:'.assembly'] ) +include { MINIASM } from '../../../modules/miniasm/main.nf' workflow test_miniasm { diff --git a/tests/modules/miniasm/nextflow.config b/tests/modules/miniasm/nextflow.config new file mode 100644 index 00000000..844a0120 --- /dev/null +++ b/tests/modules/miniasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MINIASM { + ext.suffix = '.assembly' + } + +} diff --git a/tests/modules/miniasm/test.yml b/tests/modules/miniasm/test.yml index 7596a269..0bdc350a 100644 --- a/tests/modules/miniasm/test.yml +++ b/tests/modules/miniasm/test.yml @@ -1,5 +1,5 @@ - name: miniasm test_miniasm - command: nextflow run tests/modules/miniasm -entry test_miniasm -c tests/config/nextflow.config + command: nextflow run ./tests/modules/miniasm -entry test_miniasm -c ./tests/config/nextflow.config -c ./tests/modules/miniasm/nextflow.config tags: - miniasm files: diff --git a/tests/modules/minimap2/align/main.nf b/tests/modules/minimap2/align/main.nf index b4dbf5bd..e507d3e5 100644 --- a/tests/modules/minimap2/align/main.nf +++ b/tests/modules/minimap2/align/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' addParams( options: [:] ) +include { MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' workflow test_minimap2_align_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/minimap2/align/nextflow.config b/tests/modules/minimap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index 3309bf4b..598a5d25 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -1,5 +1,5 @@ - name: minimap2 align single-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align @@ -8,7 +8,7 @@ md5sum: 70e8cf299ee3ecd33e629d10c1f588ce - name: minimap2 align paired-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align diff --git a/tests/modules/minimap2/index/main.nf b/tests/modules/minimap2/index/main.nf index 39aa93e0..a69efa85 100644 --- a/tests/modules/minimap2/index/main.nf +++ b/tests/modules/minimap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' addParams( options: [:] ) 
+include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' workflow test_minimap2_index { diff --git a/tests/modules/minimap2/index/nextflow.config b/tests/modules/minimap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/index/test.yml b/tests/modules/minimap2/index/test.yml index 7a3cc8fa..95700452 100644 --- a/tests/modules/minimap2/index/test.yml +++ b/tests/modules/minimap2/index/test.yml @@ -1,5 +1,5 @@ - name: minimap2 index - command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/index/nextflow.config tags: - minimap2 - minimap2/index diff --git a/tests/modules/mlst/main.nf b/tests/modules/mlst/main.nf index 4b7d44be..f84ec622 100644 --- a/tests/modules/mlst/main.nf +++ b/tests/modules/mlst/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MLST } from '../../../modules/mlst/main.nf' addParams( options: [:] ) +include { MLST } from '../../../modules/mlst/main.nf' workflow test_mlst { diff --git a/tests/modules/mlst/nextflow.config b/tests/modules/mlst/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mlst/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mlst/test.yml b/tests/modules/mlst/test.yml index 5a7c7a0e..53eacc5a 100644 --- a/tests/modules/mlst/test.yml +++ b/tests/modules/mlst/test.yml @@ -1,5 +1,5 @@ - name: mlst test_mlst - command: nextflow run tests/modules/mlst -entry test_mlst -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/mlst -entry test_mlst -c ./tests/config/nextflow.config -c ./tests/modules/mlst/nextflow.config tags: - mlst files: diff --git a/tests/modules/mosdepth/main.nf b/tests/modules/mosdepth/main.nf index c4d8e9c4..8862204d 100644 --- a/tests/modules/mosdepth/main.nf +++ b/tests/modules/mosdepth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' addParams( options: [:] ) +include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' workflow test_mosdepth { input = [ [ id:'test', single_end:true ], diff --git a/tests/modules/mosdepth/nextflow.config b/tests/modules/mosdepth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mosdepth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mosdepth/test.yml b/tests/modules/mosdepth/test.yml index f5ab5608..e264ef3b 100644 --- a/tests/modules/mosdepth/test.yml +++ b/tests/modules/mosdepth/test.yml @@ -1,5 +1,5 @@ - name: mosdepth - command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config tags: - mosdepth files: diff --git a/tests/modules/msisensor/msi/main.nf b/tests/modules/msisensor/msi/main.nf index f8ce4187..259ec887 100644 --- a/tests/modules/msisensor/msi/main.nf +++ b/tests/modules/msisensor/msi/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) -include { MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' +include { 
MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' workflow test_msisensor_msi { diff --git a/tests/modules/msisensor/msi/nextflow.config b/tests/modules/msisensor/msi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/msi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/msi/test.yml b/tests/modules/msisensor/msi/test.yml index 1fc74ad3..0d0da1ee 100644 --- a/tests/modules/msisensor/msi/test.yml +++ b/tests/modules/msisensor/msi/test.yml @@ -1,5 +1,5 @@ - name: msisensor msi - command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/msi/nextflow.config tags: - msisensor - msisensor/msi diff --git a/tests/modules/msisensor/scan/main.nf b/tests/modules/msisensor/scan/main.nf index 2303d0b9..de46dd9b 100644 --- a/tests/modules/msisensor/scan/main.nf +++ b/tests/modules/msisensor/scan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' workflow test_msisensor_scan { diff --git a/tests/modules/msisensor/scan/nextflow.config b/tests/modules/msisensor/scan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/scan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/scan/test.yml b/tests/modules/msisensor/scan/test.yml index 0d28c5a2..9e697a59 100644 --- a/tests/modules/msisensor/scan/test.yml +++ 
b/tests/modules/msisensor/scan/test.yml @@ -1,5 +1,5 @@ - name: msisensor scan - command: nextflow run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/scan/nextflow.config tags: - msisensor - msisensor/scan diff --git a/tests/modules/mtnucratio/main.nf b/tests/modules/mtnucratio/main.nf index dd9fc9db..6d6f5e1d 100644 --- a/tests/modules/mtnucratio/main.nf +++ b/tests/modules/mtnucratio/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' addParams( options: [:] ) +include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' workflow test_mtnucratio { diff --git a/tests/modules/mtnucratio/nextflow.config b/tests/modules/mtnucratio/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mtnucratio/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mtnucratio/test.yml b/tests/modules/mtnucratio/test.yml index 76cbaf32..24dc3d16 100644 --- a/tests/modules/mtnucratio/test.yml +++ b/tests/modules/mtnucratio/test.yml @@ -1,5 +1,5 @@ - name: mtnucratio - command: nextflow run tests/modules/mtnucratio -entry test_mtnucratio -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mtnucratio -entry test_mtnucratio -c ./tests/config/nextflow.config -c ./tests/modules/mtnucratio/nextflow.config tags: - mtnucratio files: diff --git a/tests/modules/multiqc/main.nf b/tests/modules/multiqc/main.nf index ddabb43a..43643985 100644 --- a/tests/modules/multiqc/main.nf +++ b/tests/modules/multiqc/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from 
'../../../modules/multiqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../modules/multiqc/main.nf' workflow test_multiqc { input = [ [ id: 'test', single_end: false ], diff --git a/tests/modules/multiqc/nextflow.config b/tests/modules/multiqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/multiqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/multiqc/test.yml b/tests/modules/multiqc/test.yml index 69ded5d5..39796872 100644 --- a/tests/modules/multiqc/test.yml +++ b/tests/modules/multiqc/test.yml @@ -1,5 +1,5 @@ - name: multiqc - command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c ./tests/config/nextflow.config -c ./tests/modules/multiqc/nextflow.config tags: - multiqc files: diff --git a/tests/modules/mummer/main.nf b/tests/modules/mummer/main.nf index b24f8b16..30c8c4b8 100644 --- a/tests/modules/mummer/main.nf +++ b/tests/modules/mummer/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MUMMER } from '../../../modules/mummer/main.nf' addParams( options: [:] ) +include { MUMMER } from '../../../modules/mummer/main.nf' workflow test_mummer { diff --git a/tests/modules/mummer/nextflow.config b/tests/modules/mummer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mummer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mummer/test.yml b/tests/modules/mummer/test.yml index 1d368d14..359fd4ad 100644 --- a/tests/modules/mummer/test.yml +++ b/tests/modules/mummer/test.yml @@ -1,5 +1,5 @@ - name: mummer test_mummer - 
command: nextflow run tests/modules/mummer -entry test_mummer -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mummer -entry test_mummer -c ./tests/config/nextflow.config -c ./tests/modules/mummer/nextflow.config tags: - mummer files: diff --git a/tests/modules/muscle/main.nf b/tests/modules/muscle/main.nf index 81a71761..a6294519 100644 --- a/tests/modules/muscle/main.nf +++ b/tests/modules/muscle/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MUSCLE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-fasta -verbose -phys -phyi -maxiters 2']) -include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-maketree']) +include { MUSCLE } from '../../../modules/muscle/main.nf' +include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' workflow test_muscle { diff --git a/tests/modules/muscle/nextflow.config b/tests/modules/muscle/nextflow.config new file mode 100644 index 00000000..31331b0f --- /dev/null +++ b/tests/modules/muscle/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MUSCLE { + ext.args = '-fasta -verbose -phys -phyi -maxiters 2' + } + + withName: MUSCLE_TREE { + ext.args = '-maketree' + } + +} diff --git a/tests/modules/muscle/test.yml b/tests/modules/muscle/test.yml index 7f9d2a54..6995d71d 100644 --- a/tests/modules/muscle/test.yml +++ b/tests/modules/muscle/test.yml @@ -1,5 +1,5 @@ - name: muscle test_muscle - command: nextflow run tests/modules/muscle -entry test_muscle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/muscle -entry test_muscle -c ./tests/config/nextflow.config -c ./tests/modules/muscle/nextflow.config tags: - muscle files: diff --git a/tests/modules/nanolyse/main.nf b/tests/modules/nanolyse/main.nf index 97941a6d..91013cd0 100644 --- a/tests/modules/nanolyse/main.nf +++ 
b/tests/modules/nanolyse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOLYSE } from '../../../modules/nanolyse/main.nf' addParams( options: [suffix: '.clean'] ) +include { NANOLYSE } from '../../../modules/nanolyse/main.nf' workflow test_nanolyse { input = [ diff --git a/tests/modules/nanolyse/nextflow.config b/tests/modules/nanolyse/nextflow.config new file mode 100644 index 00000000..ede080cc --- /dev/null +++ b/tests/modules/nanolyse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NANOLYSE { + ext.suffix = '.clean' + } + +} diff --git a/tests/modules/nanolyse/test.yml b/tests/modules/nanolyse/test.yml index 4938fe57..5af2e65e 100644 --- a/tests/modules/nanolyse/test.yml +++ b/tests/modules/nanolyse/test.yml @@ -1,5 +1,5 @@ - name: nanolyse - command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c ./tests/config/nextflow.config -c ./tests/modules/nanolyse/nextflow.config tags: - nanolyse files: diff --git a/tests/modules/nanoplot/main.nf b/tests/modules/nanoplot/main.nf index a483f5e2..04c923c2 100644 --- a/tests/modules/nanoplot/main.nf +++ b/tests/modules/nanoplot/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOPLOT } from '../../../modules/nanoplot/main.nf' addParams( options: [:] ) +include { NANOPLOT } from '../../../modules/nanoplot/main.nf' workflow test_nanoplot_summary { def input = [] diff --git a/tests/modules/nanoplot/nextflow.config b/tests/modules/nanoplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nanoplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nanoplot/test.yml b/tests/modules/nanoplot/test.yml index 
475b90c9..6549953e 100644 --- a/tests/modules/nanoplot/test.yml +++ b/tests/modules/nanoplot/test.yml @@ -1,6 +1,6 @@ - name: nanoplot_summary - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: @@ -8,7 +8,7 @@ contains: - "report" - name: nanoplot_fastq - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: diff --git a/tests/modules/ncbigenomedownload/main.nf b/tests/modules/ncbigenomedownload/main.nf index f729b91d..2447b97c 100644 --- a/tests/modules/ncbigenomedownload/main.nf +++ b/tests/modules/ncbigenomedownload/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' addParams( options: [ args: '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria '] ) +include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' workflow test_ncbigenomedownload { diff --git a/tests/modules/ncbigenomedownload/nextflow.config b/tests/modules/ncbigenomedownload/nextflow.config new file mode 100644 index 00000000..7e6ccf70 --- /dev/null +++ b/tests/modules/ncbigenomedownload/nextflow.config @@ -0,0 +1,8 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NCBIGENOMEDOWNLOAD { + ext.args = '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria' + } +} diff --git a/tests/modules/ncbigenomedownload/test.yml b/tests/modules/ncbigenomedownload/test.yml index 7d1f7c74..8765e04f 100644 --- 
a/tests/modules/ncbigenomedownload/test.yml +++ b/tests/modules/ncbigenomedownload/test.yml @@ -1,5 +1,5 @@ - name: ncbigenomedownload test_ncbigenomedownload - command: nextflow run tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c ./tests/config/nextflow.config -c ./tests/modules/ncbigenomedownload/nextflow.config tags: - ncbigenomedownload files: diff --git a/tests/modules/nextclade/main.nf b/tests/modules/nextclade/main.nf index 93c50ca5..15750990 100755 --- a/tests/modules/nextclade/main.nf +++ b/tests/modules/nextclade/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NEXTCLADE } from '../../../modules/nextclade/main.nf' addParams( options: [:] ) +include { NEXTCLADE } from '../../../modules/nextclade/main.nf' workflow test_nextclade { input = [ diff --git a/tests/modules/nextclade/nextflow.config b/tests/modules/nextclade/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nextclade/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nextclade/test.yml b/tests/modules/nextclade/test.yml index 4d1d7743..36218aad 100755 --- a/tests/modules/nextclade/test.yml +++ b/tests/modules/nextclade/test.yml @@ -1,5 +1,5 @@ - name: nextclade test_nextclade - command: nextflow run tests/modules/nextclade -entry test_nextclade -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nextclade -entry test_nextclade -c ./tests/config/nextflow.config -c ./tests/modules/nextclade/nextflow.config tags: - nextclade files: diff --git a/tests/modules/ngmaster/main.nf b/tests/modules/ngmaster/main.nf index 8bc975ed..b23530bc 100644 --- a/tests/modules/ngmaster/main.nf +++ b/tests/modules/ngmaster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
NGMASTER } from '../../../modules/ngmaster/main.nf' addParams( options: [:] ) +include { NGMASTER } from '../../../modules/ngmaster/main.nf' workflow test_ngmaster { diff --git a/tests/modules/ngmaster/nextflow.config b/tests/modules/ngmaster/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ngmaster/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ngmaster/test.yml b/tests/modules/ngmaster/test.yml index 31584a54..fb8dec82 100644 --- a/tests/modules/ngmaster/test.yml +++ b/tests/modules/ngmaster/test.yml @@ -1,5 +1,5 @@ - name: ngmaster test_ngmaster - command: nextflow run tests/modules/ngmaster -entry test_ngmaster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ngmaster -entry test_ngmaster -c ./tests/config/nextflow.config -c ./tests/modules/ngmaster/nextflow.config tags: - ngmaster files: diff --git a/tests/modules/nucmer/main.nf b/tests/modules/nucmer/main.nf index 8021f577..98e74b07 100644 --- a/tests/modules/nucmer/main.nf +++ b/tests/modules/nucmer/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NUCMER } from '../../../modules/nucmer/main.nf' addParams( options: [:] ) +include { NUCMER } from '../../../modules/nucmer/main.nf' workflow test_nucmer { diff --git a/tests/modules/nucmer/nextflow.config b/tests/modules/nucmer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nucmer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nucmer/test.yml b/tests/modules/nucmer/test.yml index 86b3df5d..62caced4 100644 --- a/tests/modules/nucmer/test.yml +++ b/tests/modules/nucmer/test.yml @@ -1,5 +1,5 @@ - name: nucmer test_nucmer - command: nextflow run tests/modules/nucmer -entry 
test_nucmer -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nucmer -entry test_nucmer -c ./tests/config/nextflow.config -c ./tests/modules/nucmer/nextflow.config tags: - nucmer files: diff --git a/tests/modules/optitype/main.nf b/tests/modules/optitype/main.nf index c27a5c99..55b46f0a 100644 --- a/tests/modules/optitype/main.nf +++ b/tests/modules/optitype/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { OPTITYPE } from '../../../modules/optitype/main.nf' addParams( options: ['args':'-e 1 -b 0.009', 'args2':'solver=glpk'] ) +include { OPTITYPE } from '../../../modules/optitype/main.nf' workflow test_optitype { input = [ [ id:'test', seq_type:'dna' ], // meta map diff --git a/tests/modules/optitype/nextflow.config b/tests/modules/optitype/nextflow.config new file mode 100644 index 00000000..14ad9e3f --- /dev/null +++ b/tests/modules/optitype/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: OPTITYPE { + ext.args = '-e 1 -b 0.009' + ext.args2 = 'solver=glpk' + } + +} diff --git a/tests/modules/optitype/test.yml b/tests/modules/optitype/test.yml index 41f35988..7c2ff0d0 100644 --- a/tests/modules/optitype/test.yml +++ b/tests/modules/optitype/test.yml @@ -1,5 +1,5 @@ - name: optitype test_optitype - command: nextflow run tests/modules/optitype -entry test_optitype -c tests/config/nextflow.config + command: nextflow run ./tests/modules/optitype -entry test_optitype -c ./tests/config/nextflow.config -c ./tests/modules/optitype/nextflow.config tags: - optitype files: diff --git a/tests/modules/pairix/main.nf b/tests/modules/pairix/main.nf index f1e2a44a..474bacbb 100644 --- a/tests/modules/pairix/main.nf +++ b/tests/modules/pairix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRIX } from '../../../modules/pairix/main.nf' addParams( options: [:] ) +include { PAIRIX } from '../../../modules/pairix/main.nf' 
workflow test_pairix { diff --git a/tests/modules/pairix/nextflow.config b/tests/modules/pairix/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairix/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairix/test.yml b/tests/modules/pairix/test.yml index 304a94b3..4cd9d37d 100644 --- a/tests/modules/pairix/test.yml +++ b/tests/modules/pairix/test.yml @@ -1,5 +1,5 @@ - name: pairix test_pairix - command: nextflow run tests/modules/pairix -entry test_pairix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairix -entry test_pairix -c ./tests/config/nextflow.config -c ./tests/modules/pairix/nextflow.config tags: - pairix files: diff --git a/tests/modules/pairtools/dedup/main.nf b/tests/modules/pairtools/dedup/main.nf index 2c10c85b..28121526 100644 --- a/tests/modules/pairtools/dedup/main.nf +++ b/tests/modules/pairtools/dedup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' addParams( options: ['suffix':'.dedup'] ) +include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' workflow test_pairtools_dedup { diff --git a/tests/modules/pairtools/dedup/nextflow.config b/tests/modules/pairtools/dedup/nextflow.config new file mode 100644 index 00000000..1de3348f --- /dev/null +++ b/tests/modules/pairtools/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_DEDUP { + ext.suffix = '.dedup' + } + +} diff --git a/tests/modules/pairtools/dedup/test.yml b/tests/modules/pairtools/dedup/test.yml index 25fc51f7..6d7f99f4 100644 --- a/tests/modules/pairtools/dedup/test.yml +++ b/tests/modules/pairtools/dedup/test.yml @@ -1,5 +1,5 @@ - name: pairtools dedup 
test_pairtools_dedup - command: nextflow run tests/modules/pairtools/dedup -entry test_pairtools_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/dedup -entry test_pairtools_dedup -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/dedup/nextflow.config tags: - pairtools/dedup - pairtools diff --git a/tests/modules/pairtools/flip/main.nf b/tests/modules/pairtools/flip/main.nf index ed980102..e4d740e2 100644 --- a/tests/modules/pairtools/flip/main.nf +++ b/tests/modules/pairtools/flip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_FLIP } from '../../../../modules/pairtools/flip/main.nf' addParams( options: [:] ) +include { PAIRTOOLS_FLIP } from '../../../../modules/pairtools/flip/main.nf' workflow test_pairtools_flip { diff --git a/tests/modules/pairtools/flip/nextflow.config b/tests/modules/pairtools/flip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairtools/flip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairtools/flip/test.yml b/tests/modules/pairtools/flip/test.yml index eeef6530..cec54976 100644 --- a/tests/modules/pairtools/flip/test.yml +++ b/tests/modules/pairtools/flip/test.yml @@ -1,5 +1,5 @@ - name: pairtools flip test_pairtools_flip - command: nextflow run tests/modules/pairtools/flip -entry test_pairtools_flip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/flip -entry test_pairtools_flip -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/flip/nextflow.config tags: - pairtools/flip - pairtools diff --git a/tests/modules/pairtools/parse/main.nf b/tests/modules/pairtools/parse/main.nf index 26ceaa4f..f006fd6a 100644 --- a/tests/modules/pairtools/parse/main.nf +++ b/tests/modules/pairtools/parse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' addParams( options: ['suffix':'.raw'] ) +include { PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' workflow test_pairtools_parse { diff --git a/tests/modules/pairtools/parse/nextflow.config b/tests/modules/pairtools/parse/nextflow.config new file mode 100644 index 00000000..1a1182f6 --- /dev/null +++ b/tests/modules/pairtools/parse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_PARSE { + ext.suffix = '.raw' + } + +} diff --git a/tests/modules/pairtools/parse/test.yml b/tests/modules/pairtools/parse/test.yml index e5d18e01..cf01038c 100644 --- a/tests/modules/pairtools/parse/test.yml +++ b/tests/modules/pairtools/parse/test.yml @@ -1,5 +1,5 @@ - name: pairtools parse test_pairtools_parse - command: nextflow run tests/modules/pairtools/parse -entry test_pairtools_parse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/parse -entry test_pairtools_parse -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/parse/nextflow.config tags: - pairtools - pairtools/parse diff --git a/tests/modules/pairtools/restrict/main.nf b/tests/modules/pairtools/restrict/main.nf index f785ed88..ae7e328b 100644 --- a/tests/modules/pairtools/restrict/main.nf +++ b/tests/modules/pairtools/restrict/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' addParams( options: ['suffix':'.restrict'] ) +include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' workflow test_pairtools_restrict { diff --git a/tests/modules/pairtools/restrict/nextflow.config b/tests/modules/pairtools/restrict/nextflow.config new file mode 100644 index 00000000..857d7534 --- /dev/null +++ b/tests/modules/pairtools/restrict/nextflow.config @@ -0,0 +1,9 @@ +process { 
+ + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_RESTRICT { + ext.suffix = '.restrict' + } + +} diff --git a/tests/modules/pairtools/restrict/test.yml b/tests/modules/pairtools/restrict/test.yml index afc64930..484b3739 100644 --- a/tests/modules/pairtools/restrict/test.yml +++ b/tests/modules/pairtools/restrict/test.yml @@ -1,5 +1,5 @@ - name: pairtools restrict test_pairtools_restrict - command: nextflow run tests/modules/pairtools/restrict -entry test_pairtools_restrict -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/restrict -entry test_pairtools_restrict -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/restrict/nextflow.config tags: - pairtools/restrict - pairtools diff --git a/tests/modules/pairtools/select/main.nf b/tests/modules/pairtools/select/main.nf index 2efd29c7..ff65cd95 100644 --- a/tests/modules/pairtools/select/main.nf +++ b/tests/modules/pairtools/select/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' addParams( options: [args:"(pair_type == 'RU') or (pair_type == 'UR') or (pair_type == 'UU')"] ) +include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' workflow test_pairtools_select { diff --git a/tests/modules/pairtools/select/nextflow.config b/tests/modules/pairtools/select/nextflow.config new file mode 100644 index 00000000..df33cd2e --- /dev/null +++ b/tests/modules/pairtools/select/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SELECT { + ext.args = "(pair_type == \'RU\') or (pair_type == \'UR\') or (pair_type == \'UU\')" + } + +} diff --git a/tests/modules/pairtools/select/test.yml b/tests/modules/pairtools/select/test.yml index adeb50c3..431e8366 100644 --- 
a/tests/modules/pairtools/select/test.yml +++ b/tests/modules/pairtools/select/test.yml @@ -1,5 +1,5 @@ - name: pairtools select test_pairtools_select - command: nextflow run tests/modules/pairtools/select -entry test_pairtools_select -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/select -entry test_pairtools_select -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/select/nextflow.config tags: - pairtools/select - pairtools diff --git a/tests/modules/pairtools/sort/main.nf b/tests/modules/pairtools/sort/main.nf index dfb505e0..0e484c76 100644 --- a/tests/modules/pairtools/sort/main.nf +++ b/tests/modules/pairtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' addParams( options: ['suffix':'.sorted'] ) +include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' workflow test_pairtools_sort { diff --git a/tests/modules/pairtools/sort/nextflow.config b/tests/modules/pairtools/sort/nextflow.config new file mode 100644 index 00000000..86b3d802 --- /dev/null +++ b/tests/modules/pairtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SORT { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/pairtools/sort/test.yml b/tests/modules/pairtools/sort/test.yml index 9eea74a0..4d4866aa 100644 --- a/tests/modules/pairtools/sort/test.yml +++ b/tests/modules/pairtools/sort/test.yml @@ -1,5 +1,5 @@ - name: pairtools sort test_pairtools_sort - command: nextflow run tests/modules/pairtools/sort -entry test_pairtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/sort -entry test_pairtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/sort/nextflow.config tags: - pairtools/sort - pairtools diff --git a/tests/modules/pangolin/main.nf 
b/tests/modules/pangolin/main.nf index b8130c5d..ab4aa4af 100644 --- a/tests/modules/pangolin/main.nf +++ b/tests/modules/pangolin/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PANGOLIN } from '../../../modules/pangolin/main.nf' addParams( options: [:] ) +include { PANGOLIN } from '../../../modules/pangolin/main.nf' workflow test_pangolin { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/pangolin/nextflow.config b/tests/modules/pangolin/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pangolin/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pangolin/test.yml b/tests/modules/pangolin/test.yml index 5fb5e79e..c77e4912 100644 --- a/tests/modules/pangolin/test.yml +++ b/tests/modules/pangolin/test.yml @@ -1,5 +1,5 @@ - name: pangolin - command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config -c ./tests/modules/pangolin/nextflow.config tags: - pangolin files: diff --git a/tests/modules/paraclu/main.nf b/tests/modules/paraclu/main.nf index f5101591..3bd75dc0 100644 --- a/tests/modules/paraclu/main.nf +++ b/tests/modules/paraclu/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PARACLU } from '../../../modules/paraclu/main.nf' addParams( options: [:] ) +include { PARACLU } from '../../../modules/paraclu/main.nf' workflow test_paraclu { diff --git a/tests/modules/paraclu/nextflow.config b/tests/modules/paraclu/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/paraclu/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/paraclu/test.yml 
b/tests/modules/paraclu/test.yml index 3aa3e8b4..36b37df5 100644 --- a/tests/modules/paraclu/test.yml +++ b/tests/modules/paraclu/test.yml @@ -1,5 +1,5 @@ - name: paraclu test_paraclu - command: nextflow run tests/modules/paraclu -entry test_paraclu -c tests/config/nextflow.config + command: nextflow run ./tests/modules/paraclu -entry test_paraclu -c ./tests/config/nextflow.config -c ./tests/modules/paraclu/nextflow.config tags: - paraclu files: diff --git a/tests/modules/pbbam/pbmerge/main.nf b/tests/modules/pbbam/pbmerge/main.nf index 9220af0c..34ed33a6 100644 --- a/tests/modules/pbbam/pbmerge/main.nf +++ b/tests/modules/pbbam/pbmerge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' addParams( options: [suffix: '.merged'] ) +include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' workflow test_pbbam_pbmerge { diff --git a/tests/modules/pbbam/pbmerge/nextflow.config b/tests/modules/pbbam/pbmerge/nextflow.config new file mode 100644 index 00000000..c897068b --- /dev/null +++ b/tests/modules/pbbam/pbmerge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBBAM_PBMERGE { + ext.suffix = '.merged' + } + +} diff --git a/tests/modules/pbbam/pbmerge/test.yml b/tests/modules/pbbam/pbmerge/test.yml index 4f334c0e..0a6d7da3 100644 --- a/tests/modules/pbbam/pbmerge/test.yml +++ b/tests/modules/pbbam/pbmerge/test.yml @@ -1,5 +1,5 @@ - name: pbbam pbmerge test_pbbam_pbmerge - command: nextflow run tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c ./tests/config/nextflow.config -c ./tests/modules/pbbam/pbmerge/nextflow.config tags: - pbbam/pbmerge - pbbam diff --git a/tests/modules/pbccs/main.nf b/tests/modules/pbccs/main.nf index 74c1b864..91a2ab30 100644 --- 
a/tests/modules/pbccs/main.nf +++ b/tests/modules/pbccs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PBCCS } from '../../../modules/pbccs/main.nf' addParams( options: [args:'--min-rq 0.9'] ) +include { PBCCS } from '../../../modules/pbccs/main.nf' workflow test_pbccs { diff --git a/tests/modules/pbccs/nextflow.config b/tests/modules/pbccs/nextflow.config new file mode 100644 index 00000000..869909ce --- /dev/null +++ b/tests/modules/pbccs/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBCCS { + ext.args = '--min-rq 0.9' + } + +} diff --git a/tests/modules/pbccs/test.yml b/tests/modules/pbccs/test.yml index af225eb1..5d481923 100644 --- a/tests/modules/pbccs/test.yml +++ b/tests/modules/pbccs/test.yml @@ -1,5 +1,5 @@ - name: pbccs test_pbccs - command: nextflow run tests/modules/pbccs -entry test_pbccs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbccs -entry test_pbccs -c ./tests/config/nextflow.config -c ./tests/modules/pbccs/nextflow.config tags: - pbccs files: diff --git a/tests/modules/peddy/main.nf b/tests/modules/peddy/main.nf index d6331752..e53e8152 100644 --- a/tests/modules/peddy/main.nf +++ b/tests/modules/peddy/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PEDDY } from '../../../modules/peddy/main.nf' addParams( options: [:] ) +include { PEDDY } from '../../../modules/peddy/main.nf' workflow test_peddy { @@ -13,5 +13,5 @@ workflow test_peddy { ] ped = file(params.test_data['homo_sapiens']['genome']['justhusky_ped'], checkIfExists: true) - PEDDY ( input , ped ) + PEDDY ( input, ped ) } diff --git a/tests/modules/peddy/nextflow.config b/tests/modules/peddy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/peddy/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/peddy/test.yml b/tests/modules/peddy/test.yml index 77bf00f6..0ed6dc94 100644 --- a/tests/modules/peddy/test.yml +++ b/tests/modules/peddy/test.yml @@ -1,5 +1,5 @@ - name: peddy test_peddy - command: nextflow run tests/modules/peddy -entry test_peddy -c tests/config/nextflow.config + command: nextflow run ./tests/modules/peddy -entry test_peddy -c ./tests/config/nextflow.config ./tests/modules/peddy/nextflow.config tags: - peddy files: diff --git a/tests/modules/phyloflash/main.nf b/tests/modules/phyloflash/main.nf index 754d6747..412e0321 100644 --- a/tests/modules/phyloflash/main.nf +++ b/tests/modules/phyloflash/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' addParams( options: [:] ) +include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' process STUB_PHYLOFLASH_DATABASE { output: @@ -19,22 +19,22 @@ process STUB_PHYLOFLASH_DATABASE { workflow test_phyloflash_single_end { STUB_PHYLOFLASH_DATABASE () - - input = [ + + input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] - PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) } workflow test_phyloflash_paired_end { STUB_PHYLOFLASH_DATABASE () - input = [ + input = [ [ id:'test', single_end:false ], // meta map - [ + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] diff --git a/tests/modules/phyloflash/nextflow.config b/tests/modules/phyloflash/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/phyloflash/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/phyloflash/test.yml b/tests/modules/phyloflash/test.yml index 0cba41c5..81eac2f2 100644 --- a/tests/modules/phyloflash/test.yml +++ b/tests/modules/phyloflash/test.yml @@ -1,5 +1,5 @@ - name: phyloflash single-end - command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run tags: - phyloflash files: @@ -7,7 +7,7 @@ md5sum: d41d8cd98f00b204e9800998ecf8427e - name: phyloflash paired-end - command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run tags: - phyloflash files: diff --git a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf index 24b031fc..2e8727b5 100644 --- a/tests/modules/picard/collecthsmetrics/main.nf +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' workflow test_picard_collecthsmetrics { diff --git a/tests/modules/picard/collecthsmetrics/nextflow.config b/tests/modules/picard/collecthsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml index 8c610abd..9232d508 100644 --- a/tests/modules/picard/collecthsmetrics/test.yml +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collecthsmetrics test_picard_collecthsmetrics - command: nextflow run tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collecthsmetrics/nextflow.config tags: - picard - picard/collecthsmetrics diff --git a/tests/modules/picard/collectmultiplemetrics/main.nf b/tests/modules/picard/collectmultiplemetrics/main.nf index 73ac0013..453ecc91 100644 --- a/tests/modules/picard/collectmultiplemetrics/main.nf +++ b/tests/modules/picard/collectmultiplemetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' workflow test_picard_collectmultiplemetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectmultiplemetrics/nextflow.config b/tests/modules/picard/collectmultiplemetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectmultiplemetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectmultiplemetrics/test.yml b/tests/modules/picard/collectmultiplemetrics/test.yml index fc4d0347..8fecca73 100644 --- 
a/tests/modules/picard/collectmultiplemetrics/test.yml +++ b/tests/modules/picard/collectmultiplemetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectmultiplemetrics - command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectmultiplemetrics/nextflow.config tags: - picard - picard/collectmultiplemetrics diff --git a/tests/modules/picard/collectwgsmetrics/main.nf b/tests/modules/picard/collectwgsmetrics/main.nf index 5bdf17ab..1d75a2bd 100644 --- a/tests/modules/picard/collectwgsmetrics/main.nf +++ b/tests/modules/picard/collectwgsmetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgsmetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgsmetrics/main.nf' workflow test_picard_collectwgsmetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectwgsmetrics/nextflow.config b/tests/modules/picard/collectwgsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectwgsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectwgsmetrics/test.yml b/tests/modules/picard/collectwgsmetrics/test.yml index 62e87e65..2daef406 100644 --- a/tests/modules/picard/collectwgsmetrics/test.yml +++ b/tests/modules/picard/collectwgsmetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectwgsmetrics test_picard_collectwgsmetrics - command: nextflow run tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectwgsmetrics/nextflow.config tags: - picard/collectwgsmetrics - picard diff --git a/tests/modules/picard/filtersamreads/main.nf b/tests/modules/picard/filtersamreads/main.nf index a03471dd..847bee57 100644 --- a/tests/modules/picard/filtersamreads/main.nf +++ b/tests/modules/picard/filtersamreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) -include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' addParams( options: [suffix:'.filtered'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' +include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' workflow test_picard_filtersamreads { diff --git a/tests/modules/picard/filtersamreads/nextflow.config b/tests/modules/picard/filtersamreads/nextflow.config new file mode 100644 index 00000000..e9ce4914 --- /dev/null +++ b/tests/modules/picard/filtersamreads/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.suffix = '.sorted' + } + + withName: PICARD_FILTERSAMREADS { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/picard/filtersamreads/test.yml b/tests/modules/picard/filtersamreads/test.yml index e8e73ed0..a0ab712b 100644 --- a/tests/modules/picard/filtersamreads/test.yml +++ b/tests/modules/picard/filtersamreads/test.yml @@ -1,5 +1,5 @@ - name: picard filtersamreads - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry 
test_picard_filtersamreads -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads @@ -9,7 +9,7 @@ - name: picard filtersamreads readlist - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 7c9c63cd..12f3ac26 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) -include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : 'ASSUME_SORT_ORDER=queryname' ] ) +include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/markduplicates/nextflow.config b/tests/modules/picard/markduplicates/nextflow.config new file mode 100644 index 00000000..9178c5b1 --- /dev/null +++ b/tests/modules/picard/markduplicates/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_MARKDUPLICATES_UNSORTED { + ext.args = 'ASSUME_SORT_ORDER=queryname' + } + +} diff --git 
a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 4c314814..beb54009 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -1,5 +1,5 @@ - name: picard markduplicates sorted bam - command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates @@ -9,7 +9,7 @@ - "1.0 97 97" - path: ./output/picard/test.bam - name: picard markduplicates unsorted bam - command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates diff --git a/tests/modules/picard/mergesamfiles/main.nf b/tests/modules/picard/mergesamfiles/main.nf index 5ddc849f..51c070b6 100644 --- a/tests/modules/picard/mergesamfiles/main.nf +++ b/tests/modules/picard/mergesamfiles/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' addParams( options: [:] ) +include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' workflow test_picard_mergesamfiles { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/mergesamfiles/nextflow.config b/tests/modules/picard/mergesamfiles/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/mergesamfiles/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/mergesamfiles/test.yml b/tests/modules/picard/mergesamfiles/test.yml index a331c96f..1cf59cb7 100644 --- a/tests/modules/picard/mergesamfiles/test.yml +++ b/tests/modules/picard/mergesamfiles/test.yml @@ -1,5 +1,5 @@ - name: picard mergesamfiles - command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c ./tests/config/nextflow.config -c ./tests/modules/picard/mergesamfiles/nextflow.config tags: - picard - picard/mergesamfiles diff --git a/tests/modules/picard/sortsam/main.nf b/tests/modules/picard/sortsam/main.nf index 0130fad6..1516682c 100644 --- a/tests/modules/picard/sortsam/main.nf +++ b/tests/modules/picard/sortsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' workflow test_picard_sortsam { diff --git a/tests/modules/picard/sortsam/nextflow.config b/tests/modules/picard/sortsam/nextflow.config new file mode 100644 index 00000000..2c290cbe --- /dev/null +++ b/tests/modules/picard/sortsam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/picard/sortsam/test.yml b/tests/modules/picard/sortsam/test.yml index 4443228e..61521850 100644 --- a/tests/modules/picard/sortsam/test.yml +++ b/tests/modules/picard/sortsam/test.yml @@ -1,5 +1,5 @@ - name: picard sortsam - command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/picard/sortsam -entry test_picard_sortsam -c ./tests/config/nextflow.config -c ./tests/modules/picard/sortsam/nextflow.config tags: - picard - picard/sortsam diff --git a/tests/modules/pirate/main.nf b/tests/modules/pirate/main.nf index 5957b1e6..05e5bdd8 100644 --- a/tests/modules/pirate/main.nf +++ b/tests/modules/pirate/main.nf @@ -2,15 +2,22 @@ nextflow.enable.dsl = 2 -include { PIRATE } from '../../../modules/pirate/main.nf' addParams( options: [:] ) +include { PIRATE } from '../../../modules/pirate/main.nf' workflow test_pirate { - - input = [ [ id:'test', single_end:false ], // meta map - [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] + // [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + // ] PIRATE ( input ) } diff --git a/tests/modules/pirate/nextflow.config b/tests/modules/pirate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pirate/nextflow.config @@ -0,0 +1,5 @@ +process { + + 
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pirate/test.yml b/tests/modules/pirate/test.yml index d8c4d0c4..b8d36b95 100644 --- a/tests/modules/pirate/test.yml +++ b/tests/modules/pirate/test.yml @@ -1,5 +1,5 @@ - name: pirate test_pirate - command: nextflow run tests/modules/pirate -entry test_pirate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pirate -entry test_pirate -c ./tests/config/nextflow.config -c ./tests/modules/pirate/nextflow.config tags: - pirate files: diff --git a/tests/modules/plasmidid/main.nf b/tests/modules/plasmidid/main.nf index 1dd57daf..52d25a91 100644 --- a/tests/modules/plasmidid/main.nf +++ b/tests/modules/plasmidid/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLASMIDID } from '../../../modules/plasmidid/main.nf' addParams ( options: ['args' : '-k 0.8'] ) +include { PLASMIDID } from '../../../modules/plasmidid/main.nf' workflow test_plasmidid { diff --git a/tests/modules/plasmidid/nextflow.config b/tests/modules/plasmidid/nextflow.config new file mode 100644 index 00000000..2090bfae --- /dev/null +++ b/tests/modules/plasmidid/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLASMIDID { + ext.args = '-k 0.8' + } + +} diff --git a/tests/modules/plasmidid/test.yml b/tests/modules/plasmidid/test.yml index 838af394..cd0528cb 100644 --- a/tests/modules/plasmidid/test.yml +++ b/tests/modules/plasmidid/test.yml @@ -1,5 +1,5 @@ - name: plasmidid - command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c ./tests/config/nextflow.config -c ./tests/modules/plasmidid/nextflow.config tags: - plasmidid files: diff --git a/tests/modules/plink/extract/main.nf b/tests/modules/plink/extract/main.nf 
index e031a7b7..6beb0469 100644 --- a/tests/modules/plink/extract/main.nf +++ b/tests/modules/plink/extract/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams ( options: [args:'--make-bed --set-missing-var-ids @:#:\\$1:\\$2']) -include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' addParams( options: [suffix:'.extract'] ) +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' +include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' workflow test_plink_extract { diff --git a/tests/modules/plink/extract/nextflow.config b/tests/modules/plink/extract/nextflow.config new file mode 100644 index 00000000..12668b01 --- /dev/null +++ b/tests/modules/plink/extract/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = '--make-bed --set-missing-var-ids @:#:\\$1:\\$2' + } + + withName: PLINK_EXTRACT { + ext.suffix = '.extract' + } + +} diff --git a/tests/modules/plink/extract/test.yml b/tests/modules/plink/extract/test.yml index 40569d9d..87cf82cc 100644 --- a/tests/modules/plink/extract/test.yml +++ b/tests/modules/plink/extract/test.yml @@ -1,5 +1,5 @@ - name: plink extract test_plink_extract - command: nextflow run tests/modules/plink/extract -entry test_plink_extract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink/extract -entry test_plink_extract -c ./tests/config/nextflow.config -c ./tests/modules/plink/extract/nextflow.config tags: - plink - plink/extract diff --git a/tests/modules/plink/vcf/main.nf b/tests/modules/plink/vcf/main.nf index 096bacdd..4dac8978 100644 --- a/tests/modules/plink/vcf/main.nf +++ b/tests/modules/plink/vcf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' addParams( options: ['args':" --make-bed 
--biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr"]) +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' workflow test_plink_vcf { diff --git a/tests/modules/plink/vcf/nextflow.config b/tests/modules/plink/vcf/nextflow.config new file mode 100644 index 00000000..f0b72c8d --- /dev/null +++ b/tests/modules/plink/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = ' --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr' + } + +} diff --git a/tests/modules/plink/vcf/test.yml b/tests/modules/plink/vcf/test.yml index bfd54386..9042d14a 100644 --- a/tests/modules/plink/vcf/test.yml +++ b/tests/modules/plink/vcf/test.yml @@ -1,5 +1,5 @@ - name: plink vcf test_plink_vcf - command: nextflow run tests/modules/plink/vcf -entry test_plink_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink/vcf -entry test_plink_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink/vcf/nextflow.config tags: - plink - plink/vcf diff --git a/tests/modules/plink2/vcf/main.nf b/tests/modules/plink2/vcf/main.nf index 409e7995..08d7dc61 100644 --- a/tests/modules/plink2/vcf/main.nf +++ b/tests/modules/plink2/vcf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' addParams( options: [args:'--allow-extra-chr'] ) +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' workflow test_plink2_vcf { diff --git a/tests/modules/plink2/vcf/nextflow.config b/tests/modules/plink2/vcf/nextflow.config new file mode 100644 index 00000000..7f7e5e77 --- /dev/null +++ b/tests/modules/plink2/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK2_VCF { + ext.args = '--allow-extra-chr' + } + +} diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml index 3f0cddc6..52f58a42 100644 --- a/tests/modules/plink2/vcf/test.yml +++ b/tests/modules/plink2/vcf/test.yml @@ -1,5 +1,5 @@ - name: plink2 vcf test_plink2_vcf - command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink2/vcf -entry test_plink2_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink2/vcf/nextflow.config tags: - plink2/vcf - plink2 diff --git a/tests/modules/pmdtools/filter/main.nf b/tests/modules/pmdtools/filter/main.nf index c4832bbb..f1b2b4d3 100644 --- a/tests/modules/pmdtools/filter/main.nf +++ b/tests/modules/pmdtools/filter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' addParams( options: [:] ) +include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' workflow test_pmdtools_filter { diff --git a/tests/modules/pmdtools/filter/nextflow.config b/tests/modules/pmdtools/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pmdtools/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pmdtools/filter/test.yml b/tests/modules/pmdtools/filter/test.yml index 9171b02e..a7ebefbe 100644 --- a/tests/modules/pmdtools/filter/test.yml +++ b/tests/modules/pmdtools/filter/test.yml @@ -1,5 +1,5 @@ - name: pmdtools filter - command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c 
./tests/config/nextflow.config -c ./tests/modules/pmdtools/filter/nextflow.config tags: - pmdtools - pmdtools/filter diff --git a/tests/modules/porechop/main.nf b/tests/modules/porechop/main.nf index b6d7bafa..f20b7a6e 100644 --- a/tests/modules/porechop/main.nf +++ b/tests/modules/porechop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PORECHOP } from '../../../modules/porechop/main.nf' addParams( options: [args: '', suffix: '_porechop'] ) +include { PORECHOP } from '../../../modules/porechop/main.nf' workflow test_porechop { diff --git a/tests/modules/porechop/nextflow.config b/tests/modules/porechop/nextflow.config new file mode 100644 index 00000000..3a0536b0 --- /dev/null +++ b/tests/modules/porechop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PORECHOP { + ext.args = '' + ext.suffix = '_porechop' + } + +} diff --git a/tests/modules/porechop/test.yml b/tests/modules/porechop/test.yml index b37a7ec4..8790ab87 100644 --- a/tests/modules/porechop/test.yml +++ b/tests/modules/porechop/test.yml @@ -1,5 +1,5 @@ - name: porechop test_porechop - command: nextflow run tests/modules/porechop -entry test_porechop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/porechop -entry test_porechop -c ./tests/config/nextflow.config -c ./tests/modules/porechop/nextflow.config tags: - porechop files: diff --git a/tests/modules/preseq/lcextrap/main.nf b/tests/modules/preseq/lcextrap/main.nf index 390039bd..4bbbd146 100644 --- a/tests/modules/preseq/lcextrap/main.nf +++ b/tests/modules/preseq/lcextrap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' addParams( options: [:] ) +include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' // // Test with single-end data diff --git a/tests/modules/preseq/lcextrap/nextflow.config 
b/tests/modules/preseq/lcextrap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/preseq/lcextrap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/preseq/lcextrap/test.yml b/tests/modules/preseq/lcextrap/test.yml index 4472a485..ecd1d046 100644 --- a/tests/modules/preseq/lcextrap/test.yml +++ b/tests/modules/preseq/lcextrap/test.yml @@ -1,5 +1,5 @@ - name: preseq lcextrap single-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap @@ -9,7 +9,7 @@ - path: output/preseq/test.command.log - name: preseq lcextrap paired-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap diff --git a/tests/modules/prodigal/main.nf b/tests/modules/prodigal/main.nf index 414585a1..6e282015 100644 --- a/tests/modules/prodigal/main.nf +++ b/tests/modules/prodigal/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRODIGAL } from '../../../modules/prodigal/main.nf' addParams( options: [:] ) +include { PRODIGAL } from '../../../modules/prodigal/main.nf' workflow test_prodigal { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/prodigal/nextflow.config b/tests/modules/prodigal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prodigal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prodigal/test.yml b/tests/modules/prodigal/test.yml index 93caa998..7f0ab88c 100644 --- a/tests/modules/prodigal/test.yml +++ b/tests/modules/prodigal/test.yml @@ -1,5 +1,5 @@ - name: prodigal test_prodigal - command: nextflow run tests/modules/prodigal -entry test_prodigal -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prodigal -entry test_prodigal -c ./tests/config/nextflow.config -c ./tests/modules/prodigal/nextflow.config tags: - prodigal files: diff --git a/tests/modules/prokka/main.nf b/tests/modules/prokka/main.nf index e35cb1d9..97e94ca8 100644 --- a/tests/modules/prokka/main.nf +++ b/tests/modules/prokka/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PROKKA } from '../../../modules/prokka/main.nf' addParams( options: [:] ) +include { PROKKA } from '../../../modules/prokka/main.nf' workflow test_prokka { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/prokka/nextflow.config b/tests/modules/prokka/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prokka/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prokka/test.yml b/tests/modules/prokka/test.yml index 2823353c..92f813a7 100644 --- a/tests/modules/prokka/test.yml +++ b/tests/modules/prokka/test.yml @@ -1,5 +1,5 @@ - name: prokka - command: nextflow run ./tests/modules/prokka -entry test_prokka -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prokka -entry test_prokka -c ./tests/config/nextflow.config -c ./tests/modules/prokka/nextflow.config tags: - prokka files: diff --git a/tests/modules/pycoqc/main.nf b/tests/modules/pycoqc/main.nf index ab65dadc..c8a8ee2c 100644 --- a/tests/modules/pycoqc/main.nf +++ 
b/tests/modules/pycoqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYCOQC } from '../../../modules/pycoqc/main.nf' addParams ( options: ['args' : '--min_pass_qual 0'] ) +include { PYCOQC } from '../../../modules/pycoqc/main.nf' workflow test_pycoqc { diff --git a/tests/modules/pycoqc/nextflow.config b/tests/modules/pycoqc/nextflow.config new file mode 100644 index 00000000..d532f8f7 --- /dev/null +++ b/tests/modules/pycoqc/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PYCOQC { + ext.args = '--min_pass_qual 0' + } + +} diff --git a/tests/modules/pycoqc/test.yml b/tests/modules/pycoqc/test.yml index 052e3e1a..becd911b 100644 --- a/tests/modules/pycoqc/test.yml +++ b/tests/modules/pycoqc/test.yml @@ -1,5 +1,5 @@ - name: pycoqc - command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c ./tests/config/nextflow.config -c ./tests/modules/pycoqc/nextflow.config tags: - pycoqc files: diff --git a/tests/modules/pydamage/analyze/main.nf b/tests/modules/pydamage/analyze/main.nf index ddf0b27a..920a4201 100644 --- a/tests/modules/pydamage/analyze/main.nf +++ b/tests/modules/pydamage/analyze/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/analyze/nextflow.config b/tests/modules/pydamage/analyze/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/analyze/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml index 157e947f..9d22f20e 100644 --- a/tests/modules/pydamage/analyze/test.yml +++ b/tests/modules/pydamage/analyze/test.yml @@ -1,5 +1,5 @@ - name: pydamage analyze test workflow - command: nextflow run tests/modules/pydamage/analyze -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pydamage/analyze -entry test_pydamage -c ./tests/config/nextflow.config -c ./tests/modules/pydamage/analyze/nextflow.config tags: - pydamage - pydamage/analyze diff --git a/tests/modules/pydamage/filter/main.nf b/tests/modules/pydamage/filter/main.nf index 03e90408..dac03e78 100644 --- a/tests/modules/pydamage/filter/main.nf +++ b/tests/modules/pydamage/filter/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) -include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' addParams( options: [:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' +include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/filter/nextflow.config b/tests/modules/pydamage/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pydamage/filter/test.yml b/tests/modules/pydamage/filter/test.yml index 248be44b..b6738e3d 100644 --- a/tests/modules/pydamage/filter/test.yml +++ b/tests/modules/pydamage/filter/test.yml @@ -1,5 +1,5 @@ - name: pydamage filter test workflow - command: nextflow run tests/modules/pydamage/filter -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/pydamage/filter -entry test_pydamage -c ./tests/config/nextflow.config -c ./tests/modules/pydamage/filter/nextflow.config tags: - pydamage - pydamage/filter @@ -7,4 +7,4 @@ - path: output/pydamage/pydamage_results/pydamage_filtered_results.csv md5sum: 9f297233cf4932d7d7e52cc72d4727dc - path: output/pydamage/pydamage_results/pydamage_results.csv - md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 + md5sum: 37ee6b4dee6890fd2ec8550337f21ac9 diff --git a/tests/modules/qcat/main.nf b/tests/modules/qcat/main.nf index 72c87e37..8a5cdd6d 100644 --- a/tests/modules/qcat/main.nf +++ b/tests/modules/qcat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QCAT } from '../../../modules/qcat/main.nf' addParams( options: [:] ) +include { QCAT } from '../../../modules/qcat/main.nf' workflow test_qcat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/qcat/nextflow.config b/tests/modules/qcat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qcat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qcat/test.yml b/tests/modules/qcat/test.yml index 5c43841b..47ece983 100644 --- a/tests/modules/qcat/test.yml +++ b/tests/modules/qcat/test.yml @@ -1,5 +1,5 @@ - name: qcat - command: nextflow run ./tests/modules/qcat -entry test_qcat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/qcat -entry test_qcat -c ./tests/config/nextflow.config -c ./tests/modules/qcat/nextflow.config tags: - qcat files: diff --git a/tests/modules/qualimap/bamqc/main.nf b/tests/modules/qualimap/bamqc/main.nf index 803d0220..a17efd59 100644 --- a/tests/modules/qualimap/bamqc/main.nf +++ b/tests/modules/qualimap/bamqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUALIMAP_BAMQC } from '../../../../modules/qualimap/bamqc/main.nf' addParams( options: [:] ) +include { QUALIMAP_BAMQC 
} from '../../../../modules/qualimap/bamqc/main.nf' workflow test_qualimap_bamqc { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/qualimap/bamqc/nextflow.config b/tests/modules/qualimap/bamqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qualimap/bamqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qualimap/bamqc/test.yml b/tests/modules/qualimap/bamqc/test.yml index 7d746a51..41c4199e 100644 --- a/tests/modules/qualimap/bamqc/test.yml +++ b/tests/modules/qualimap/bamqc/test.yml @@ -1,5 +1,5 @@ - name: qualimap bamqc test workflow - command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c ./tests/config/nextflow.config -c ./tests/modules/qualimap/bamqc/nextflow.config tags: - qualimap - qualimap/bamqc diff --git a/tests/modules/quast/main.nf b/tests/modules/quast/main.nf index d263470c..c879a8a9 100644 --- a/tests/modules/quast/main.nf +++ b/tests/modules/quast/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUAST } from '../../../modules/quast/main.nf' addParams(options: [:]) +include { QUAST } from '../../../modules/quast/main.nf' workflow test_quast_ref { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/quast/nextflow.config b/tests/modules/quast/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/quast/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/quast/test.yml b/tests/modules/quast/test.yml index 6e1f991f..166cd896 100644 --- a/tests/modules/quast/test.yml 
+++ b/tests/modules/quast/test.yml @@ -1,5 +1,5 @@ - name: quast with reference - command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: @@ -82,7 +82,7 @@ - path: ./output/quast/quast/icarus_viewers/contig_size_viewer.html - name: quast without reference - command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: diff --git a/tests/modules/racon/main.nf b/tests/modules/racon/main.nf index b6b864e1..507d8d8d 100644 --- a/tests/modules/racon/main.nf +++ b/tests/modules/racon/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RACON } from '../../../modules/racon/main.nf' addParams( options: [:] ) +include { RACON } from '../../../modules/racon/main.nf' workflow test_racon { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/racon/nextflow.config b/tests/modules/racon/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/racon/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/racon/test.yml b/tests/modules/racon/test.yml index dc8e57dc..0250fa36 100644 --- a/tests/modules/racon/test.yml +++ b/tests/modules/racon/test.yml @@ -1,5 +1,5 @@ - name: racon test_racon - command: nextflow run tests/modules/racon -entry test_racon -c tests/config/nextflow.config + command: nextflow run ./tests/modules/racon -entry test_racon -c ./tests/config/nextflow.config -c ./tests/modules/racon/nextflow.config tags: - racon files: diff --git a/tests/modules/rapidnj/main.nf 
b/tests/modules/rapidnj/main.nf index e23fa46f..66d19c3c 100644 --- a/tests/modules/rapidnj/main.nf +++ b/tests/modules/rapidnj/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' addParams( options: [:] ) +include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' workflow test_rapidnj { diff --git a/tests/modules/rapidnj/nextflow.config b/tests/modules/rapidnj/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rapidnj/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rapidnj/test.yml b/tests/modules/rapidnj/test.yml index 0b7ecff5..21f6ead9 100644 --- a/tests/modules/rapidnj/test.yml +++ b/tests/modules/rapidnj/test.yml @@ -1,5 +1,5 @@ - name: rapidnj - command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c ./tests/config/nextflow.config -c ./tests/modules/rapidnj/nextflow.config tags: - rapidnj files: diff --git a/tests/modules/rasusa/main.nf b/tests/modules/rasusa/main.nf index 9cc139ad..8a11627c 100644 --- a/tests/modules/rasusa/main.nf +++ b/tests/modules/rasusa/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RASUSA } from '../../../modules/rasusa/main.nf' addParams( options: ['suffix':'_100X']) +include { RASUSA } from '../../../modules/rasusa/main.nf' workflow test_rasusa { input = [ [ id:'test', single_end:false], // meta map diff --git a/tests/modules/rasusa/nextflow.config b/tests/modules/rasusa/nextflow.config new file mode 100644 index 00000000..fea844ae --- /dev/null +++ b/tests/modules/rasusa/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RASUSA { + ext.suffix = '_100X' + } + +} diff 
--git a/tests/modules/rasusa/test.yml b/tests/modules/rasusa/test.yml index bb30c99e..41c56b67 100644 --- a/tests/modules/rasusa/test.yml +++ b/tests/modules/rasusa/test.yml @@ -1,5 +1,5 @@ - name: rasusa test_rasusa - command: nextflow run tests/modules/rasusa -entry test_rasusa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rasusa -entry test_rasusa -c ./tests/config/nextflow.config -c ./tests/modules/rasusa/nextflow.config tags: - rasusa files: diff --git a/tests/modules/raxmlng/main.nf b/tests/modules/raxmlng/main.nf index 2cac6b31..5fad6953 100644 --- a/tests/modules/raxmlng/main.nf +++ b/tests/modules/raxmlng/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--model GTR+G'] ) -include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--all --model GTR+G --bs-trees 1000'] ) +include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' +include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' // // Test without bootstrapping diff --git a/tests/modules/raxmlng/nextflow.config b/tests/modules/raxmlng/nextflow.config new file mode 100644 index 00000000..8c269a9b --- /dev/null +++ b/tests/modules/raxmlng/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RAXMLNG_NO_BOOTSTRAP { + ext.args = '--model GTR+G' + } + + withName: RAXMLNG_BOOTSTRAP { + ext.args = '--all --model GTR+G --bs-trees 1000' + } + +} diff --git a/tests/modules/raxmlng/test.yml b/tests/modules/raxmlng/test.yml index 950c48ad..735b6a74 100644 --- a/tests/modules/raxmlng/test.yml +++ b/tests/modules/raxmlng/test.yml @@ -1,5 +1,5 @@ - name: raxmlng no_bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: @@ -11,7 +11,7 @@ - 'sample4:0.111' - name: raxmlng bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: diff --git a/tests/modules/rmarkdownnotebook/main.nf b/tests/modules/rmarkdownnotebook/main.nf index e56d54ff..fdb7d3b9 100644 --- a/tests/modules/rmarkdownnotebook/main.nf +++ b/tests/modules/rmarkdownnotebook/main.nf @@ -2,12 +2,8 @@ nextflow.enable.dsl = 2 -include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - options: [:] -) +include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' +include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' workflow test_rmarkdown { diff --git a/tests/modules/rmarkdownnotebook/nextflow.config b/tests/modules/rmarkdownnotebook/nextflow.config new file mode 100644 index 00000000..c99f5250 --- /dev/null +++ b/tests/modules/rmarkdownnotebook/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RMARKDOWNNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: RMARKDOWNNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + +} diff --git 
a/tests/modules/rmarkdownnotebook/test.yml b/tests/modules/rmarkdownnotebook/test.yml index bef6086a..3645514a 100644 --- a/tests/modules/rmarkdownnotebook/test.yml +++ b/tests/modules/rmarkdownnotebook/test.yml @@ -1,5 +1,5 @@ - name: rmarkdownnotebook test_rmarkdown - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: @@ -12,7 +12,7 @@ - "n_iter = 10" - name: rmarkdownnotebook test_rmarkdown_parametrize - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: diff --git a/tests/modules/roary/main.nf b/tests/modules/roary/main.nf index a4a96d6e..3fae516c 100644 --- a/tests/modules/roary/main.nf +++ b/tests/modules/roary/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { ROARY } from '../../../modules/roary/main.nf' addParams( options: [:] ) +include { ROARY } from '../../../modules/roary/main.nf' workflow test_roary { - - input = [ [ id:'test', single_end:false ], // meta map - [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + 
file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] ROARY ( input ) diff --git a/tests/modules/roary/nextflow.config b/tests/modules/roary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/roary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml index c8e8c33d..981ab51c 100644 --- a/tests/modules/roary/test.yml +++ b/tests/modules/roary/test.yml @@ -1,5 +1,5 @@ - name: roary test_roary - command: nextflow run tests/modules/roary -entry test_roary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/roary -entry test_roary -c ./tests/config/nextflow.config -c ./tests/modules/roary/nextflow.config tags: - roary files: @@ -8,9 +8,9 @@ - path: output/roary/results/accessory.tab contains: ['FT'] - path: output/roary/results/accessory_binary_genes.fa - md5sum: 0baeea4947bf17a2bf29d43a44f0278f + md5sum: d4191cf748dd8016ad877857a034bef3 - path: output/roary/results/accessory_binary_genes.fa.newick - md5sum: b1f8c76ab231bd38b850c1f8d3c1584b + md5sum: d4a2a64e781263ca1b9b3a4bc9d3a6ea - path: output/roary/results/accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/blast_identity_frequency.Rtab @@ -20,7 +20,7 @@ - path: output/roary/results/core_accessory.header.embl contains: ['ID Genome standard; DNA; PRO; 1234 BP.'] - path: output/roary/results/core_accessory.tab - contains: ['FT /taxa="GCF_000292685 GCF_000298385 GCF_002849995"'] + contains: ['FT /taxa="test1 test2 test3"'] - path: output/roary/results/core_accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/gene_presence_absence.Rtab diff --git 
a/tests/modules/rsem/calculateexpression/main.nf b/tests/modules/rsem/calculateexpression/main.nf index e7de83a4..9d6d3c5c 100644 --- a/tests/modules/rsem/calculateexpression/main.nf +++ b/tests/modules/rsem/calculateexpression/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [args: "--star"]) -include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' addParams(options: [args: "--star --star-gzipped-read-file"]) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' +include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' workflow test_rsem_calculateexpression { diff --git a/tests/modules/rsem/calculateexpression/nextflow.config b/tests/modules/rsem/calculateexpression/nextflow.config new file mode 100644 index 00000000..b17a1cf2 --- /dev/null +++ b/tests/modules/rsem/calculateexpression/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RSEM_PREPAREREFERENCE { + ext.args = '--star' + } + + withName: RSEM_CALCULATEEXPRESSION { + ext.args = '--star --star-gzipped-read-file' + } + +} diff --git a/tests/modules/rsem/calculateexpression/test.yml b/tests/modules/rsem/calculateexpression/test.yml index ac0866ea..f19c3398 100644 --- a/tests/modules/rsem/calculateexpression/test.yml +++ b/tests/modules/rsem/calculateexpression/test.yml @@ -1,55 +1,55 @@ - name: rsem calculateexpression test_rsem_calculateexpression - command: nextflow run tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c ./tests/config/nextflow.config -c ./tests/modules/rsem/calculateexpression/nextflow.config 
tags: - rsem - rsem/calculateexpression files: - - path: output/index/rsem/Genome + - path: output/rsem/rsem/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/rsem/Log.out - - path: output/index/rsem/SA + - path: output/rsem/rsem/Log.out + - path: output/rsem/rsem/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/rsem/SAindex + - path: output/rsem/rsem/SAindex md5sum: fd05c149960e72642a8d7c860528ae81 - - path: output/index/rsem/chrLength.txt + - path: output/rsem/rsem/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/rsem/chrName.txt + - path: output/rsem/rsem/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/rsem/chrNameLength.txt + - path: output/rsem/rsem/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/chrStart.txt + - path: output/rsem/rsem/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/rsem/exonGeTrInfo.tab + - path: output/rsem/rsem/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/rsem/exonInfo.tab + - path: output/rsem/rsem/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/rsem/geneInfo.tab + - path: output/rsem/rsem/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/rsem/genome.chrlist + - path: output/rsem/rsem/genome.chrlist md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/genome.fasta + - path: output/rsem/rsem/genome.fasta md5sum: f315020d899597c1b57e5fe9f60f4c3e - - path: output/index/rsem/genome.grp + - path: output/rsem/rsem/genome.grp md5sum: c2848a8b6d495956c11ec53efc1de67e - - path: output/index/rsem/genome.idx.fa + - path: output/rsem/rsem/genome.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.n2g.idx.fa + - path: output/rsem/rsem/genome.n2g.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.seq + - path: 
output/rsem/rsem/genome.seq md5sum: 94da0c6b88c33e63c9a052a11f4f57c1 - - path: output/index/rsem/genome.ti + - path: output/rsem/rsem/genome.ti md5sum: c9e4ae8d4d13a504eec2acf1b8589a66 - - path: output/index/rsem/genome.transcripts.fa + - path: output/rsem/rsem/genome.transcripts.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genomeParameters.txt + - path: output/rsem/rsem/genomeParameters.txt md5sum: 2fe3a030e1706c3e8cd4df3818e6dd2f - - path: output/index/rsem/sjdbInfo.txt + - path: output/rsem/rsem/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/rsem/sjdbList.fromGTF.out.tab + - path: output/rsem/rsem/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/rsem/sjdbList.out.tab + - path: output/rsem/rsem/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/rsem/transcriptInfo.tab + - path: output/rsem/rsem/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/rsem/test.genes.results md5sum: c7ec226f76736ea805771e73553ae359 diff --git a/tests/modules/rsem/preparereference/main.nf b/tests/modules/rsem/preparereference/main.nf index 2d4a9053..8062737d 100644 --- a/tests/modules/rsem/preparereference/main.nf +++ b/tests/modules/rsem/preparereference/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [publish_dir:'rsem']) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' workflow test_rsem_preparereference { diff --git a/tests/modules/rsem/preparereference/nextflow.config b/tests/modules/rsem/preparereference/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rsem/preparereference/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/rsem/preparereference/test.yml b/tests/modules/rsem/preparereference/test.yml index 734a92b2..1f058bea 100644 --- a/tests/modules/rsem/preparereference/test.yml +++ b/tests/modules/rsem/preparereference/test.yml @@ -1,5 +1,5 @@ - name: rsem preparereference test_rsem_preparereference - command: nextflow run tests/modules/rsem/preparereference -entry test_rsem_preparereference -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/preparereference -entry test_rsem_preparereference -c ./tests/config/nextflow.config -c ./tests/modules/rsem/preparereference/nextflow.config tags: - rsem - rsem/preparereference diff --git a/tests/modules/rseqc/bamstat/main.nf b/tests/modules/rseqc/bamstat/main.nf index c13e7f97..4c53a1af 100644 --- a/tests/modules/rseqc/bamstat/main.nf +++ b/tests/modules/rseqc/bamstat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' addParams(options: [:]) +include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' workflow test_rseqc_bamstat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/bamstat/nextflow.config b/tests/modules/rseqc/bamstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/bamstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/bamstat/test.yml b/tests/modules/rseqc/bamstat/test.yml index 75d62672..4cb35d0c 100644 --- a/tests/modules/rseqc/bamstat/test.yml +++ b/tests/modules/rseqc/bamstat/test.yml @@ -1,5 +1,5 @@ - name: rseqc bamstat test_rseqc_bamstat - command: nextflow run tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c ./tests/config/nextflow.config -c 
./tests/modules/rseqc/bamstat/nextflow.config tags: - rseqc - rseqc/bamstat diff --git a/tests/modules/rseqc/inferexperiment/main.nf b/tests/modules/rseqc/inferexperiment/main.nf index ae8c53a9..6337063d 100644 --- a/tests/modules/rseqc/inferexperiment/main.nf +++ b/tests/modules/rseqc/inferexperiment/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' addParams(options: [:]) +include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' workflow test_rseqc_inferexperiment { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/inferexperiment/nextflow.config b/tests/modules/rseqc/inferexperiment/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/inferexperiment/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/inferexperiment/test.yml b/tests/modules/rseqc/inferexperiment/test.yml index 59d6f3d5..554f8317 100644 --- a/tests/modules/rseqc/inferexperiment/test.yml +++ b/tests/modules/rseqc/inferexperiment/test.yml @@ -1,5 +1,5 @@ - name: rseqc inferexperiment test_rseqc_inferexperiment - command: nextflow run tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/inferexperiment/nextflow.config tags: - rseqc - rseqc/inferexperiment diff --git a/tests/modules/rseqc/innerdistance/main.nf b/tests/modules/rseqc/innerdistance/main.nf index 003e8a14..8cc0ec3e 100644 --- a/tests/modules/rseqc/innerdistance/main.nf +++ b/tests/modules/rseqc/innerdistance/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INNERDISTANCE } from 
'../../../../modules/rseqc/innerdistance/main.nf' addParams(options: [:]) +include { RSEQC_INNERDISTANCE } from '../../../../modules/rseqc/innerdistance/main.nf' workflow test_rseqc_innerdistance { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/innerdistance/nextflow.config b/tests/modules/rseqc/innerdistance/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/innerdistance/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/innerdistance/test.yml b/tests/modules/rseqc/innerdistance/test.yml index b0ee0283..c0f480e9 100644 --- a/tests/modules/rseqc/innerdistance/test.yml +++ b/tests/modules/rseqc/innerdistance/test.yml @@ -1,5 +1,5 @@ - name: rseqc innerdistance test_rseqc_innerdistance - command: nextflow run tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/innerdistance/nextflow.config tags: - rseqc - rseqc/innerdistance diff --git a/tests/modules/rseqc/junctionannotation/main.nf b/tests/modules/rseqc/junctionannotation/main.nf index a6913850..303dcd85 100644 --- a/tests/modules/rseqc/junctionannotation/main.nf +++ b/tests/modules/rseqc/junctionannotation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' workflow test_rseqc_junctionannotation { input = [ diff --git a/tests/modules/rseqc/junctionannotation/nextflow.config b/tests/modules/rseqc/junctionannotation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- 
/dev/null +++ b/tests/modules/rseqc/junctionannotation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionannotation/test.yml b/tests/modules/rseqc/junctionannotation/test.yml index 39326f67..f2020b10 100644 --- a/tests/modules/rseqc/junctionannotation/test.yml +++ b/tests/modules/rseqc/junctionannotation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionannotation test_rseqc_junctionannotation - command: nextflow run tests/modules/rseqc/junctionannotation -entry test_rseqc_junctionannotation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionannotation -entry test_rseqc_junctionannotation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionannotation/nextflow.config tags: - rseqc - rseqc/junctionannotation diff --git a/tests/modules/rseqc/junctionsaturation/main.nf b/tests/modules/rseqc/junctionsaturation/main.nf index 047fb372..eefbb492 100644 --- a/tests/modules/rseqc/junctionsaturation/main.nf +++ b/tests/modules/rseqc/junctionsaturation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONSATURATION } from '../../../../modules/rseqc/junctionsaturation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONSATURATION } from '../../../../modules/rseqc/junctionsaturation/main.nf' workflow test_rseqc_junctionsaturation { input = [ diff --git a/tests/modules/rseqc/junctionsaturation/nextflow.config b/tests/modules/rseqc/junctionsaturation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/junctionsaturation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionsaturation/test.yml b/tests/modules/rseqc/junctionsaturation/test.yml index dfadb371..db977360 100644 --- 
a/tests/modules/rseqc/junctionsaturation/test.yml +++ b/tests/modules/rseqc/junctionsaturation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionsaturation test_rseqc_junctionsaturation - command: nextflow run tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionsaturation/nextflow.config tags: - rseqc/junctionsaturation - rseqc diff --git a/tests/modules/rseqc/readdistribution/main.nf b/tests/modules/rseqc/readdistribution/main.nf index 415aed9a..180367f2 100644 --- a/tests/modules/rseqc/readdistribution/main.nf +++ b/tests/modules/rseqc/readdistribution/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' addParams(options: [:]) +include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' workflow test_rseqc_readdistribution { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readdistribution/nextflow.config b/tests/modules/rseqc/readdistribution/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/readdistribution/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readdistribution/test.yml b/tests/modules/rseqc/readdistribution/test.yml index 79e7e1d3..e530e92a 100644 --- a/tests/modules/rseqc/readdistribution/test.yml +++ b/tests/modules/rseqc/readdistribution/test.yml @@ -1,5 +1,5 @@ - name: rseqc readdistribution test_rseqc_readdistribution - command: nextflow run tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readdistribution/nextflow.config tags: - rseqc - rseqc/readdistribution diff --git a/tests/modules/rseqc/readduplication/main.nf b/tests/modules/rseqc/readduplication/main.nf index b94f6945..bcccde5d 100644 --- a/tests/modules/rseqc/readduplication/main.nf +++ b/tests/modules/rseqc/readduplication/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' addParams(options: [:]) +include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' workflow test_rseqc_readduplication { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readduplication/nextflow.config b/tests/modules/rseqc/readduplication/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/readduplication/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readduplication/test.yml b/tests/modules/rseqc/readduplication/test.yml index 2a4c9546..b0c35071 100644 --- a/tests/modules/rseqc/readduplication/test.yml +++ b/tests/modules/rseqc/readduplication/test.yml @@ -1,5 +1,5 @@ - name: rseqc readduplication test_rseqc_readduplication - command: nextflow run tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readduplication/nextflow.config tags: - rseqc/readduplication - rseqc diff --git a/tests/modules/salmon/index/main.nf b/tests/modules/salmon/index/main.nf index 98804733..680b4c6e 100644 --- a/tests/modules/salmon/index/main.nf +++ 
b/tests/modules/salmon/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [publish_dir:'salmon'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' workflow test_salmon_index { genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/salmon/index/nextflow.config b/tests/modules/salmon/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/salmon/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/salmon/index/test.yml b/tests/modules/salmon/index/test.yml index acefb044..07815e37 100644 --- a/tests/modules/salmon/index/test.yml +++ b/tests/modules/salmon/index/test.yml @@ -1,5 +1,5 @@ - name: salmon index - command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c ./tests/config/nextflow.config -c ./tests/modules/salmon/index/nextflow.config tags: - salmon - salmon/index diff --git a/tests/modules/salmon/quant/main.nf b/tests/modules/salmon/quant/main.nf index ad15870c..a970f6c5 100644 --- a/tests/modules/salmon/quant/main.nf +++ b/tests/modules/salmon/quant/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [:] ) -include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' addParams( options: [args: '--minAssignedFrags 1'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' +include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' workflow test_salmon_quant_single_end { - input = [ [ id:'test', single_end:true ], // meta map - 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -21,10 +24,13 @@ workflow test_salmon_quant_single_end { workflow test_salmon_quant_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -36,9 +42,12 @@ workflow test_salmon_quant_paired_end { workflow test_salmon_quant_single_end_lib_type_A { - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = 
file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/salmon/quant/nextflow.config b/tests/modules/salmon/quant/nextflow.config new file mode 100644 index 00000000..7a8c911a --- /dev/null +++ b/tests/modules/salmon/quant/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SALMON_QUANT { + ext.args = '--minAssignedFrags 1' + } + +} diff --git a/tests/modules/salmon/quant/test.yml b/tests/modules/salmon/quant/test.yml index d7ed0d0f..514718fa 100644 --- a/tests/modules/salmon/quant/test.yml +++ b/tests/modules/salmon/quant/test.yml @@ -1,5 +1,5 @@ - name: salmon quant single-end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -23,34 +23,34 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 2de170bdc9f6fd237d286429b292bb28 - - path: ./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: 
./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant paired end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -74,35 +74,35 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 221f754ed55dd1e34874f9b7b3f9d240 - - path: ./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json 
md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant test_salmon_quant_single_end_lib_type_A - command: nextflow run tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon/quant - salmon @@ -126,26 +126,26 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: 
output/salmon/test/libParams/flenDist.txt md5sum: 2de170bdc9f6fd237d286429b292bb28 - - path: ./output/index/salmon/ref_indexing.log - - path: output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: output/index/salmon/versionInfo.json + - path: output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: output/index/salmon/complete_ref_lens.bin + - path: output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/mphf.bin + - path: output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: output/index/salmon/duplicate_clusters.tsv + - path: output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: output/index/salmon/reflengths.bin + - path: output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/info.json + - path: output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: output/index/salmon/refAccumLengths.bin + - path: output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: output/index/salmon/ctg_offsets.bin + - path: output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: output/index/salmon/rank.bin + - path: output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin diff --git a/tests/modules/samblaster/main.nf b/tests/modules/samblaster/main.nf index 5983d130..5831ecfc 100644 --- a/tests/modules/samblaster/main.nf +++ b/tests/modules/samblaster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMBLASTER } from '../../../modules/samblaster/main.nf' addParams( options: [args: "-M 
--addMateTags", suffix:'.processed'] ) +include { SAMBLASTER } from '../../../modules/samblaster/main.nf' workflow test_samblaster { diff --git a/tests/modules/samblaster/nextflow.config b/tests/modules/samblaster/nextflow.config new file mode 100644 index 00000000..3018088b --- /dev/null +++ b/tests/modules/samblaster/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMBLASTER { + ext.args = '-M --addMateTags' + ext.suffix = '.processed' + } + +} diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml index d56d4330..acc6d0f0 100644 --- a/tests/modules/samblaster/test.yml +++ b/tests/modules/samblaster/test.yml @@ -1,5 +1,5 @@ - name: samblaster test_samblaster - command: nextflow run tests/modules/samblaster -entry test_samblaster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samblaster -entry test_samblaster -c ./tests/config/nextflow.config -c ./tests/modules/samblaster/nextflow.config tags: - samblaster files: diff --git a/tests/modules/samtools/ampliconclip/main.nf b/tests/modules/samtools/ampliconclip/main.nf index a8d8609f..eae70b06 100644 --- a/tests/modules/samtools/ampliconclip/main.nf +++ b/tests/modules/samtools/ampliconclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' addParams([:]) +include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' workflow test_samtools_ampliconclip_no_stats_no_rejects { diff --git a/tests/modules/samtools/ampliconclip/nextflow.config b/tests/modules/samtools/ampliconclip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/ampliconclip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + 
+} diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index 9e8e1f9f..e8fd456c 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml samtools/ampliconclip - name: samtools ampliconclip no stats no rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip @@ -10,7 +8,7 @@ md5sum: 678f9ab04fbe3206f0f96e170fd833e9 - name: samtools ampliconclip no stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip @@ -21,7 +19,7 @@ md5sum: a0bee15aead020d16d0c81bd9667df46 - name: samtools ampliconclip with stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip diff --git a/tests/modules/samtools/bam2fq/main.nf b/tests/modules/samtools/bam2fq/main.nf index f8614ad0..928bfe08 100644 --- 
a/tests/modules/samtools/bam2fq/main.nf +++ b/tests/modules/samtools/bam2fq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' addParams( options: [args: "-T RX"] ) +include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' workflow test_samtools_bam2fq_nosplit { diff --git a/tests/modules/samtools/bam2fq/nextflow.config b/tests/modules/samtools/bam2fq/nextflow.config new file mode 100644 index 00000000..cf886bb2 --- /dev/null +++ b/tests/modules/samtools/bam2fq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + +} diff --git a/tests/modules/samtools/bam2fq/test.yml b/tests/modules/samtools/bam2fq/test.yml index ff1762b3..feb994fd 100644 --- a/tests/modules/samtools/bam2fq/test.yml +++ b/tests/modules/samtools/bam2fq/test.yml @@ -1,5 +1,5 @@ - name: samtools bam2fq test_samtools_bam2fq_nosplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools @@ -8,7 +8,7 @@ md5sum: d733e66d29a4b366bf9df8c42f845256 - name: samtools bam2fq test_samtools_bam2fq_withsplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools diff --git a/tests/modules/samtools/depth/main.nf b/tests/modules/samtools/depth/main.nf index 90497534..c6d2dc0e 100644 --- 
a/tests/modules/samtools/depth/main.nf +++ b/tests/modules/samtools/depth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' addParams( options: [:] ) +include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' workflow test_samtools_depth { diff --git a/tests/modules/samtools/depth/nextflow.config b/tests/modules/samtools/depth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/depth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/depth/test.yml b/tests/modules/samtools/depth/test.yml index 4d5007c8..978134ad 100644 --- a/tests/modules/samtools/depth/test.yml +++ b/tests/modules/samtools/depth/test.yml @@ -1,5 +1,5 @@ - name: samtools depth - command: nextflow run tests/modules/samtools/depth -entry test_samtools_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/depth -entry test_samtools_depth -c ./tests/config/nextflow.config -c ./tests/modules/samtools/depth/nextflow.config tags: - samtools/depth - samtools diff --git a/tests/modules/samtools/faidx/main.nf b/tests/modules/samtools/faidx/main.nf index 0102af28..bc47c847 100644 --- a/tests/modules/samtools/faidx/main.nf +++ b/tests/modules/samtools/faidx/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' workflow test_samtools_faidx { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/faidx/nextflow.config b/tests/modules/samtools/faidx/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/samtools/faidx/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index 49a92265..f0224f34 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,5 +1,5 @@ - name: samtools faidx test workflow - command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/faidx -entry test_samtools_faidx -c ./tests/config/nextflow.config -c ./tests/modules/samtools/faidx/nextflow.config tags: - samtools - samtools/faidx diff --git a/tests/modules/samtools/fastq/main.nf b/tests/modules/samtools/fastq/main.nf index 94ad9471..6e7e323c 100644 --- a/tests/modules/samtools/fastq/main.nf +++ b/tests/modules/samtools/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' workflow test_samtools_fastq { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/fastq/nextflow.config b/tests/modules/samtools/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/fastq/test.yml b/tests/modules/samtools/fastq/test.yml index bfcf5c92..39da9889 100644 --- a/tests/modules/samtools/fastq/test.yml +++ b/tests/modules/samtools/fastq/test.yml @@ -1,5 +1,5 @@ - name: samtools fastq test_samtools_fastq - command: nextflow run tests/modules/samtools/fastq -entry test_samtools_fastq -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fastq -entry test_samtools_fastq -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fastq/nextflow.config tags: - samtools - samtools/fastq diff --git a/tests/modules/samtools/fixmate/main.nf b/tests/modules/samtools/fixmate/main.nf index 5174beab..cb7c136d 100644 --- a/tests/modules/samtools/fixmate/main.nf +++ b/tests/modules/samtools/fixmate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' addParams( options: [args:'-r -c -m'] ) +include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' workflow test_samtools_fixmate { diff --git a/tests/modules/samtools/fixmate/nextflow.config b/tests/modules/samtools/fixmate/nextflow.config new file mode 100644 index 00000000..b9402bcf --- /dev/null +++ b/tests/modules/samtools/fixmate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_FIXMATE { + ext.args = '-r -c -m' + } + +} diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml index 0b3aa2a9..8e87e059 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -1,5 +1,5 @@ - name: samtools fixmate test_samtools_fixmate - command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fixmate -entry test_samtools_fixmate -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fixmate/nextflow.config tags: - samtools - samtools/fixmate diff --git a/tests/modules/samtools/flagstat/main.nf b/tests/modules/samtools/flagstat/main.nf index a31a7d22..a0e86422 100644 --- a/tests/modules/samtools/flagstat/main.nf +++ b/tests/modules/samtools/flagstat/main.nf @@ -2,13 +2,14 @@ 
nextflow.enable.dsl = 2 -include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' workflow test_samtools_flagstat { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + ] SAMTOOLS_FLAGSTAT ( input ) } diff --git a/tests/modules/samtools/flagstat/nextflow.config b/tests/modules/samtools/flagstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/flagstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/flagstat/test.yml b/tests/modules/samtools/flagstat/test.yml index 0da6c2f4..a5f28b36 100644 --- a/tests/modules/samtools/flagstat/test.yml +++ b/tests/modules/samtools/flagstat/test.yml @@ -1,5 +1,5 @@ - name: samtools flagstat - command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c ./tests/config/nextflow.config -c ./tests/modules/samtools/flagstat/nextflow.config tags: - samtools - samtools/flagstat diff --git a/tests/modules/samtools/idxstats/main.nf b/tests/modules/samtools/idxstats/main.nf index 9919c3e4..f3de76a0 100644 --- a/tests/modules/samtools/idxstats/main.nf +++ b/tests/modules/samtools/idxstats/main.nf @@ 
-2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_IDXSTATS } from '../../../../modules/samtools/idxstats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_IDXSTATS } from '../../../../modules/samtools/idxstats/main.nf' workflow test_samtools_idxstats { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/idxstats/nextflow.config b/tests/modules/samtools/idxstats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/idxstats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/idxstats/test.yml b/tests/modules/samtools/idxstats/test.yml index 6064ca56..88786eef 100644 --- a/tests/modules/samtools/idxstats/test.yml +++ b/tests/modules/samtools/idxstats/test.yml @@ -1,5 +1,5 @@ - name: samtools idxstats - command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/idxstats/nextflow.config tags: - samtools - samtools/idxstats diff --git a/tests/modules/samtools/index/main.nf b/tests/modules/samtools/index/main.nf index 737936fb..3592a99a 100644 --- a/tests/modules/samtools/index/main.nf +++ b/tests/modules/samtools/index/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from 
'../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' workflow test_samtools_index_bai { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/index/nextflow.config b/tests/modules/samtools/index/nextflow.config new file mode 100644 index 00000000..d3a4c785 --- /dev/null +++ b/tests/modules/samtools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_INDEX_CSI { + ext.args = '-c' + } + +} diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 279b99d8..6972ed65 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -1,5 +1,5 @@ - name: samtools index test_samtools_index_bai - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_bai -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_bai -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index @@ -8,7 +8,7 @@ md5sum: 704c10dd1326482448ca3073fdebc2f4 - name: samtools index test_samtools_index_crai - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_crai -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_crai -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index @@ -17,7 +17,7 @@ md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 - name: samtools index test_samtools_index_csi - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config + command: 
nextflow run ./tests/modules/samtools/index -entry test_samtools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index diff --git a/tests/modules/samtools/merge/main.nf b/tests/modules/samtools/merge/main.nf index 07485df1..ad5c56e3 100644 --- a/tests/modules/samtools/merge/main.nf +++ b/tests/modules/samtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' addParams( options: [suffix:'_merged'] ) +include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' workflow test_samtools_merge { input = [ [ id: 'test' ], // meta map diff --git a/tests/modules/samtools/merge/nextflow.config b/tests/modules/samtools/merge/nextflow.config new file mode 100644 index 00000000..cb350bf7 --- /dev/null +++ b/tests/modules/samtools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_MERGE { + ext.suffix = '_merged' + } + +} diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index f04aa74b..948c6191 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,5 +1,5 @@ - name: samtools merge test_samtools_merge - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - samtools - samtools/merge @@ -7,7 +7,7 @@ - path: output/samtools/test_merged.bam - name: samtools merge test_samtools_merge_cram - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry 
test_samtools_merge_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - samtools - samtools/merge diff --git a/tests/modules/samtools/mpileup/main.nf b/tests/modules/samtools/mpileup/main.nf index b8db0275..dc58cc2c 100644 --- a/tests/modules/samtools/mpileup/main.nf +++ b/tests/modules/samtools/mpileup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' addParams( options: [:] ) +include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' workflow test_samtools_mpileup { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/mpileup/nextflow.config b/tests/modules/samtools/mpileup/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/mpileup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 25c39d63..53a9c142 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: samtools mpileup - command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/samtools/mpileup/nextflow.config tags: - samtools - samtools/mpileup diff --git a/tests/modules/samtools/sort/main.nf b/tests/modules/samtools/sort/main.nf index b76cdb1a..9853b355 100644 --- a/tests/modules/samtools/sort/main.nf +++ b/tests/modules/samtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' addParams( options: ['suffix': '.sorted'] ) +include { 
SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' workflow test_samtools_sort { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/sort/nextflow.config b/tests/modules/samtools/sort/nextflow.config new file mode 100644 index 00000000..57ae6280 --- /dev/null +++ b/tests/modules/samtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_SORT { + ext.suffix = '.sorted' + } + +} diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 785ec03b..dfd2eb69 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -1,5 +1,5 @@ - name: samtools sort - command: nextflow run tests/modules/samtools/sort -entry test_samtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/sort -entry test_samtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/samtools/sort/nextflow.config tags: - samtools - samtools/sort diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 4e92b366..d83cbf4a 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_STATS } from '../../../../modules/samtools/stats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_STATS } from '../../../../modules/samtools/stats/main.nf' workflow test_samtools_stats { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/stats/nextflow.config b/tests/modules/samtools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index c186665a..178eba72 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,5 +1,5 @@ - name: samtools stats test_samtools_stats - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - samtools/stats - samtools @@ -8,7 +8,7 @@ md5sum: 09146eeecfcae2a84fb8615c86cd8d64 - name: samtools stats test_samtools_stats_cram - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - samtools/stats - samtools diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index bd270cd8..8ee27ef8 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' addParams( options: [:] ) +include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' workflow test_samtools_view { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/view/nextflow.config b/tests/modules/samtools/view/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/view/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/view/test.yml b/tests/modules/samtools/view/test.yml index ceaa0e89..1287d455 100644 --- 
a/tests/modules/samtools/view/test.yml +++ b/tests/modules/samtools/view/test.yml @@ -1,5 +1,5 @@ - name: samtools view test_samtools_view - command: nextflow run tests/modules/samtools/view -entry test_samtools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - samtools @@ -8,7 +8,7 @@ md5sum: 8fb1e82f76416e9e30fc6b2357e2cf13 - name: samtools view test_samtools_view_cram - command: nextflow run tests/modules/samtools/view -entry test_samtools_view_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - samtools diff --git a/tests/modules/scoary/main.nf b/tests/modules/scoary/main.nf index ec3f6e9f..5f080b7d 100644 --- a/tests/modules/scoary/main.nf +++ b/tests/modules/scoary/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SCOARY } from '../../../modules/scoary/main.nf' addParams( options: [:] ) +include { SCOARY } from '../../../modules/scoary/main.nf' workflow test_scoary { diff --git a/tests/modules/scoary/nextflow.config b/tests/modules/scoary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/scoary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/scoary/test.yml b/tests/modules/scoary/test.yml index c5269293..71344093 100644 --- a/tests/modules/scoary/test.yml +++ b/tests/modules/scoary/test.yml @@ -1,5 +1,5 @@ - name: scoary test_scoary - command: nextflow run tests/modules/scoary -entry test_scoary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/scoary -entry test_scoary -c ./tests/config/nextflow.config -c 
./tests/modules/scoary/nextflow.config tags: - scoary files: diff --git a/tests/modules/seacr/callpeak/main.nf b/tests/modules/seacr/callpeak/main.nf index a1aeb76e..230d3a4c 100644 --- a/tests/modules/seacr/callpeak/main.nf +++ b/tests/modules/seacr/callpeak/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' addParams( options: [ args:'norm stringent' ] ) +include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' workflow test_seacr_callpeak { input = [ [ id:'test_1'], diff --git a/tests/modules/seacr/callpeak/nextflow.config b/tests/modules/seacr/callpeak/nextflow.config new file mode 100644 index 00000000..54c19e6b --- /dev/null +++ b/tests/modules/seacr/callpeak/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEACR_CALLPEAK { + ext.args = 'norm stringent' + } + +} diff --git a/tests/modules/seacr/callpeak/test.yml b/tests/modules/seacr/callpeak/test.yml index 2cf75b06..63104bd0 100644 --- a/tests/modules/seacr/callpeak/test.yml +++ b/tests/modules/seacr/callpeak/test.yml @@ -1,5 +1,5 @@ - name: seacr callpeak - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak @@ -8,7 +8,7 @@ md5sum: a3cb0c7c4ffa895788da3f0d6371b7df - name: seacr callpeak threshold - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak diff --git 
a/tests/modules/seqkit/split2/main.nf b/tests/modules/seqkit/split2/main.nf index 21626cac..acb9d41b 100644 --- a/tests/modules/seqkit/split2/main.nf +++ b/tests/modules/seqkit/split2/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-length 8K'] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-size 50' ] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-part 3'] ) +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' workflow test_seqkit_split2_single_end_length { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/seqkit/split2/nextflow.config b/tests/modules/seqkit/split2/nextflow.config new file mode 100644 index 00000000..e4f64931 --- /dev/null +++ b/tests/modules/seqkit/split2/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQKIT_SPLIT2_LENGTH { + ext.args = '--by-length 8K' + } + + withName: SEQKIT_SPLIT2_SIZE { + ext.args = '--by-size 50' + } + + withName: SEQKIT_SPLIT2_PART { + ext.args = '--by-part 3' + } + +} diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index 13f3b003..12b02072 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -1,5 +1,5 @@ - name: seqkit split2 single-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -10,7 +10,7 @@ md5sum: cf38c51506e45380fe25abdd1bd5ccc6 - name: seqkit split2 single-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -21,7 +21,7 @@ md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 - name: seqkit split2 single-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -34,7 +34,7 @@ md5sum: 8bc86ba83a611c54f592f4eae19b680f - name: seqkit split2 paired-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -49,7 +49,7 @@ md5sum: 927097c6ac7522199a9e016333181a8e - name: seqkit split2 paired-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 @@ -64,7 
+64,7 @@ md5sum: 8796c3f327b1094244bfcdb36d536526 - name: seqkit split2 paired-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config tags: - seqkit - seqkit/split2 diff --git a/tests/modules/seqsero2/main.nf b/tests/modules/seqsero2/main.nf index 04ee8e27..9587bf9f 100644 --- a/tests/modules/seqsero2/main.nf +++ b/tests/modules/seqsero2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' addParams( options: [args: '-m k -t 4'] ) +include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' workflow test_seqsero2 { diff --git a/tests/modules/seqsero2/nextflow.config b/tests/modules/seqsero2/nextflow.config new file mode 100644 index 00000000..b46fa7e2 --- /dev/null +++ b/tests/modules/seqsero2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQSERO2 { + ext.args = '-m k -t 4' + } + +} diff --git a/tests/modules/seqsero2/test.yml b/tests/modules/seqsero2/test.yml index 2aa49686..e2dec062 100644 --- a/tests/modules/seqsero2/test.yml +++ b/tests/modules/seqsero2/test.yml @@ -1,5 +1,5 @@ - name: seqsero2 test_seqsero2 - command: nextflow run tests/modules/seqsero2 -entry test_seqsero2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqsero2 -entry test_seqsero2 -c ./tests/config/nextflow.config -c ./tests/modules/seqsero2/nextflow.config tags: - seqsero2 files: diff --git a/tests/modules/seqtk/mergepe/main.nf b/tests/modules/seqtk/mergepe/main.nf index 13654dc6..b8e12213 100644 --- a/tests/modules/seqtk/mergepe/main.nf +++ b/tests/modules/seqtk/mergepe/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [ 'suffix':'.processed' ] ) +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/mergepe/nextflow.config b/tests/modules/seqtk/mergepe/nextflow.config new file mode 100644 index 00000000..b14e72ff --- /dev/null +++ b/tests/modules/seqtk/mergepe/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_MERGEPE { + ext.suffix = '.processed' + } + +} diff --git a/tests/modules/seqtk/mergepe/test.yml b/tests/modules/seqtk/mergepe/test.yml index 8ae95354..2a6d4d33 100644 --- a/tests/modules/seqtk/mergepe/test.yml +++ b/tests/modules/seqtk/mergepe/test.yml @@ -1,5 +1,5 @@ - name: seqtk mergepe test_seqtk_mergepe_single_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk @@ -8,7 +8,7 @@ md5sum: e325ef7deb4023447a1f074e285761af - name: seqtk mergepe test_seqtk_mergepe_paired_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk diff --git a/tests/modules/seqtk/sample/main.nf b/tests/modules/seqtk/sample/main.nf index 4508db84..6899ef62 100644 --- a/tests/modules/seqtk/sample/main.nf +++ b/tests/modules/seqtk/sample/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' addParams( options: 
[ 'args': '-s100', 'suffix':'.sampled' ] ) +include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/sample/nextflow.config b/tests/modules/seqtk/sample/nextflow.config new file mode 100644 index 00000000..3efac50d --- /dev/null +++ b/tests/modules/seqtk/sample/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SAMPLE { + ext.args = '-s100' + ext.suffix = '.sampled' + } + +} diff --git a/tests/modules/seqtk/sample/test.yml b/tests/modules/seqtk/sample/test.yml index d4cf2ca9..df24b3a4 100644 --- a/tests/modules/seqtk/sample/test.yml +++ b/tests/modules/seqtk/sample/test.yml @@ -1,5 +1,5 @@ - name: seqtk sample test_seqtk_sample_single_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample @@ -8,7 +8,7 @@ md5sum: 73c3e8f113860244f3ed3866a8b9d555 - name: seqtk sample test_seqtk_sample_paired_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample diff --git a/tests/modules/seqtk/subseq/main.nf b/tests/modules/seqtk/subseq/main.nf index 7c5dc7b2..608b7c2f 100644 --- a/tests/modules/seqtk/subseq/main.nf +++ b/tests/modules/seqtk/subseq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SUBSEQ } from '../../../../modules/seqtk/subseq/main.nf' addParams( options: ['suffix':'.filtered'] ) +include { SEQTK_SUBSEQ } from 
'../../../../modules/seqtk/subseq/main.nf' workflow test_seqtk_subseq { diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config new file mode 100644 index 00000000..c61c4a74 --- /dev/null +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SUBSEQ { + ext.suffix = '.filtered' + } + +} diff --git a/tests/modules/seqtk/subseq/test.yml b/tests/modules/seqtk/subseq/test.yml index fca64804..4003e3ab 100644 --- a/tests/modules/seqtk/subseq/test.yml +++ b/tests/modules/seqtk/subseq/test.yml @@ -1,5 +1,5 @@ - name: seqtk subseq test_seqtk_subseq - command: nextflow run tests/modules/seqtk/subseq -entry test_seqtk_subseq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/subseq -entry test_seqtk_subseq -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/subseq/nextflow.config tags: - seqtk - seqtk/subseq diff --git a/tests/modules/sequenzautils/bam2seqz/main.nf b/tests/modules/sequenzautils/bam2seqz/main.nf index ae478b88..fcd4c7c7 100755 --- a/tests/modules/sequenzautils/bam2seqz/main.nf +++ b/tests/modules/sequenzautils/bam2seqz/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' addParams( options: [:] ) +include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' workflow test_sequenzautils_bam2seqz { diff --git a/tests/modules/sequenzautils/bam2seqz/nextflow.config b/tests/modules/sequenzautils/bam2seqz/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sequenzautils/bam2seqz/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/sequenzautils/bam2seqz/test.yml b/tests/modules/sequenzautils/bam2seqz/test.yml index 0b9cac53..f3ea6cf0 100644 --- a/tests/modules/sequenzautils/bam2seqz/test.yml +++ b/tests/modules/sequenzautils/bam2seqz/test.yml @@ -1,5 +1,5 @@ - name: sequenzautils bam2seqz - command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/bam2seqz/nextflow.config tags: - sequenzautils - sequenzautils/bam2seqz diff --git a/tests/modules/sequenzautils/gcwiggle/main.nf b/tests/modules/sequenzautils/gcwiggle/main.nf index e314f1e0..b25e037e 100644 --- a/tests/modules/sequenzautils/gcwiggle/main.nf +++ b/tests/modules/sequenzautils/gcwiggle/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' addParams( options: [ 'args': '-w 50' ] ) +include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' workflow test_sequenzautils_gcwiggle { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/sequenzautils/gcwiggle/nextflow.config b/tests/modules/sequenzautils/gcwiggle/nextflow.config new file mode 100644 index 00000000..62e68935 --- /dev/null +++ b/tests/modules/sequenzautils/gcwiggle/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQUENZAUTILS_GCWIGGLE { + ext.args = '-w 50' + } + +} diff --git a/tests/modules/sequenzautils/gcwiggle/test.yml b/tests/modules/sequenzautils/gcwiggle/test.yml index aa7a3167..21ddc4ab 100644 --- a/tests/modules/sequenzautils/gcwiggle/test.yml +++ b/tests/modules/sequenzautils/gcwiggle/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this 
file: -# nf-core modules create-test-yml sequenzautils/gcwiggle - name: sequenzautils gcwiggle - command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/gcwiggle/nextflow.config tags: - sequenzautils - sequenzautils/gcwiggle diff --git a/tests/modules/seqwish/induce/main.nf b/tests/modules/seqwish/induce/main.nf index 356ca705..6388fea2 100644 --- a/tests/modules/seqwish/induce/main.nf +++ b/tests/modules/seqwish/induce/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQWISH_INDUCE } from '../../../../modules/seqwish/induce/main.nf' addParams( options: [:] ) +include { SEQWISH_INDUCE } from '../../../../modules/seqwish/induce/main.nf' workflow test_seqwish_induce { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/seqwish/induce/nextflow.config b/tests/modules/seqwish/induce/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/seqwish/induce/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/seqwish/induce/test.yml b/tests/modules/seqwish/induce/test.yml index d27de3c4..d5a8a7cd 100644 --- a/tests/modules/seqwish/induce/test.yml +++ b/tests/modules/seqwish/induce/test.yml @@ -1,5 +1,5 @@ - name: seqwish induce - command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c ./tests/config/nextflow.config -c ./tests/modules/seqwish/induce/nextflow.config tags: - seqwish - seqwish/induce diff --git a/tests/modules/shovill/main.nf b/tests/modules/shovill/main.nf index acc65169..2416022f 100644 --- 
a/tests/modules/shovill/main.nf +++ b/tests/modules/shovill/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { SHOVILL } from '../../../modules/shovill/main.nf' addParams( options: [args: '--gsize 2800000 --kmers 31'] ) -include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler skesa --gsize 2800000'] ) -include { SHOVILL as SHOVILL_MEGAHIT } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler megahit --gsize 2800000'] ) -include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler velvet --gsize 2800000'] ) +include { SHOVILL } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_MEGAHIT } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' workflow test_shovill { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/shovill/nextflow.config b/tests/modules/shovill/nextflow.config new file mode 100644 index 00000000..0599f80b --- /dev/null +++ b/tests/modules/shovill/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SHOVILL { + ext.args = '--gsize 2800000 --kmers 31' + } + + withName: SHOVILL_SKESA { + ext.args = '--assembler skesa --gsize 2800000' + } + + withName: SHOVILL_MEGAHIT { + ext.args = '--assembler megahit --gsize 2800000' + } + + withName: SHOVILL_VELVET { + ext.args = '--assembler velvet --gsize 2800000' + } + +} diff --git a/tests/modules/shovill/test.yml b/tests/modules/shovill/test.yml index a716bc66..6fdd2f3f 100644 --- a/tests/modules/shovill/test.yml +++ b/tests/modules/shovill/test.yml @@ -1,5 +1,5 @@ - name: shovill with spades - command: nextflow run ./tests/modules/shovill -entry test_shovill 
-c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -13,7 +13,7 @@ - path: output/shovill/shovill.log - name: shovill with megahit - command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -26,7 +26,7 @@ - path: output/shovill/shovill.log - name: shovill with skesa - command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -39,7 +39,7 @@ - path: output/shovill/shovill.log - name: shovill with velvet - command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: diff --git a/tests/modules/snpdists/main.nf b/tests/modules/snpdists/main.nf index 8a29effa..be6d745c 100644 --- a/tests/modules/snpdists/main.nf +++ b/tests/modules/snpdists/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPDISTS } from '../../../modules/snpdists/main.nf' addParams( options: [:] ) +include { SNPDISTS } from '../../../modules/snpdists/main.nf' workflow test_snpdists { diff --git a/tests/modules/snpdists/nextflow.config b/tests/modules/snpdists/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpdists/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpdists/test.yml b/tests/modules/snpdists/test.yml index d140ce6e..c23945ce 100644 --- a/tests/modules/snpdists/test.yml +++ b/tests/modules/snpdists/test.yml @@ -1,5 +1,5 @@ - name: snpdists - command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c ./tests/config/nextflow.config -c ./tests/modules/snpdists/nextflow.config tags: - snpdists files: diff --git a/tests/modules/snpeff/main.nf b/tests/modules/snpeff/main.nf index 923f98f4..4e8a982d 100644 --- a/tests/modules/snpeff/main.nf +++ b/tests/modules/snpeff/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SNPEFF } from '../../../modules/snpeff/main.nf' addParams( snpeff_tag: '5.0.WBcel235', use_cache: false ) +include { SNPEFF } from '../../../modules/snpeff/main.nf' workflow test_snpeff { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + SNPEFF ( input, "WBcel235.99", [] ) } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config new file mode 100644 index 00000000..589c8cfb --- /dev/null +++ b/tests/modules/snpeff/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + ext.snpeff_tag = '5.0.WBcel235' + ext.use_cache = false + } + +} diff --git a/tests/modules/snpeff/test.yml b/tests/modules/snpeff/test.yml index 44eba200..8f4d980f 100644 --- a/tests/modules/snpeff/test.yml +++ b/tests/modules/snpeff/test.yml @@ -1,5 +1,5 @@ - name: snpeff test_snpeff - command: nextflow run tests/modules/snpeff -entry test_snpeff 
-c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpeff -entry test_snpeff -c ./tests/config/nextflow.config -c ./tests/modules/snpeff/nextflow.config tags: - snpeff files: diff --git a/tests/modules/snpsites/main.nf b/tests/modules/snpsites/main.nf index df2a6852..f7801673 100644 --- a/tests/modules/snpsites/main.nf +++ b/tests/modules/snpsites/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPSITES } from '../../../modules/snpsites/main.nf' addParams( options: [:] ) +include { SNPSITES } from '../../../modules/snpsites/main.nf' workflow test_snpsites { diff --git a/tests/modules/snpsites/nextflow.config b/tests/modules/snpsites/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpsites/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpsites/test.yml b/tests/modules/snpsites/test.yml index 8361cd05..d9c19cd5 100644 --- a/tests/modules/snpsites/test.yml +++ b/tests/modules/snpsites/test.yml @@ -1,5 +1,5 @@ - name: snpsites - command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c ./tests/config/nextflow.config -c ./tests/modules/snpsites/nextflow.config tags: - snpsites files: diff --git a/tests/modules/spades/main.nf b/tests/modules/spades/main.nf index b09a4266..3710eeb7 100644 --- a/tests/modules/spades/main.nf +++ b/tests/modules/spades/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SPADES } from '../../../modules/spades/main.nf' addParams( options: ['args': '--rnaviral'] ) +include { SPADES } from '../../../modules/spades/main.nf' workflow test_spades_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/spades/nextflow.config b/tests/modules/spades/nextflow.config new file mode 
100644 index 00000000..5fabafae --- /dev/null +++ b/tests/modules/spades/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPADES { + ext.args = '--rnaviral' + } + +} diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index a400e79d..98bc9c8c 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -1,5 +1,5 @@ - name: spades test_spades_single_end - command: nextflow run tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -12,7 +12,7 @@ - path: output/spades/test.spades.log - name: spades test_spades_paired_end - command: nextflow run tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -24,7 +24,7 @@ - path: output/spades/warnings.log - name: spades test_spades_illumina_nanopore - command: nextflow run tests/modules/spades -entry test_spades_illumina_nanopore -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: @@ -38,7 +38,7 @@ - path: output/spades/warnings.log - name: spades test_spades_illumina_pacbio - command: nextflow run tests/modules/spades -entry test_spades_illumina_pacbio -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_pacbio -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: diff --git a/tests/modules/spatyper/main.nf 
b/tests/modules/spatyper/main.nf index 65729cc0..655845c7 100644 --- a/tests/modules/spatyper/main.nf +++ b/tests/modules/spatyper/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { SPATYPER } from '../../../modules/spatyper/main.nf' addParams( options: [:] ) -include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' addParams( options: [args: '--do_enrich'] ) +include { SPATYPER } from '../../../modules/spatyper/main.nf' +include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' workflow test_spatyper { input = [ [ id:'test' ], diff --git a/tests/modules/spatyper/nextflow.config b/tests/modules/spatyper/nextflow.config new file mode 100644 index 00000000..ac90a452 --- /dev/null +++ b/tests/modules/spatyper/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPATYPER_ENRICH { + ext.args = '--do_enrich' + } + +} diff --git a/tests/modules/spatyper/test.yml b/tests/modules/spatyper/test.yml index 49516812..6e1f8144 100644 --- a/tests/modules/spatyper/test.yml +++ b/tests/modules/spatyper/test.yml @@ -1,5 +1,5 @@ - name: spatyper test_spatyper - command: nextflow run tests/modules/spatyper -entry test_spatyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper -c ./tests/config/nextflow.config -c ./tests/modules/spatyper/nextflow.config tags: - spatyper files: @@ -7,7 +7,7 @@ md5sum: a698352823875171696e5e7ed7015c13 - name: spatyper test_spatyper_enrich - command: nextflow run tests/modules/spatyper -entry test_spatyper_enrich -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper_enrich -c ./tests/config/nextflow.config -c ./tests/modules/spatyper/nextflow.config tags: - spatyper files: diff --git a/tests/modules/sratools/fasterqdump/main.nf b/tests/modules/sratools/fasterqdump/main.nf index 1a0e0c7a..2f838fd2 
100644 --- a/tests/modules/sratools/fasterqdump/main.nf +++ b/tests/modules/sratools/fasterqdump/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { UNTAR } from '../../../../modules/untar/main.nf' -include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' addParams( options: [:] ) +include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' workflow test_sratools_fasterqdump_single_end { diff --git a/tests/modules/sratools/fasterqdump/nextflow.config b/tests/modules/sratools/fasterqdump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/fasterqdump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml index 7d022a0d..64cf2404 100644 --- a/tests/modules/sratools/fasterqdump/test.yml +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -1,7 +1,8 @@ - name: sratools fasterqdump test_sratools_fasterqdump_single_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - - subworkflows/sra_fastq + - sratools + - sratools/fasterqdump files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -9,9 +10,10 @@ md5sum: 466d05dafb2eec672150754168010b4d - name: sratools fasterqdump test_sratools_fasterqdump_paired_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/fasterqdump -entry 
test_sratools_fasterqdump_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - - subworkflows/sra_fastq + - sratools + - sratools/fasterqdump files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 diff --git a/tests/modules/sratools/prefetch/main.nf b/tests/modules/sratools/prefetch/main.nf index 99439a7f..aa6252a1 100644 --- a/tests/modules/sratools/prefetch/main.nf +++ b/tests/modules/sratools/prefetch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' addParams( options: [:] ) +include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' workflow test_sratools_prefetch { diff --git a/tests/modules/sratools/prefetch/nextflow.config b/tests/modules/sratools/prefetch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/prefetch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sratools/prefetch/test.yml b/tests/modules/sratools/prefetch/test.yml index c23db12a..a2efef77 100644 --- a/tests/modules/sratools/prefetch/test.yml +++ b/tests/modules/sratools/prefetch/test.yml @@ -1,5 +1,5 @@ - name: sratools prefetch test_sratools_prefetch - command: nextflow run tests/modules/sratools/prefetch -entry test_sratools_prefetch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/prefetch -entry test_sratools_prefetch -c ./tests/config/nextflow.config -c ./tests/modules/sratools/prefetch/nextflow.config tags: - sratools/prefetch - sratools diff --git a/tests/modules/staphopiasccmec/main.nf b/tests/modules/staphopiasccmec/main.nf index ec1b48e4..8ea310ce 100644 --- a/tests/modules/staphopiasccmec/main.nf +++ b/tests/modules/staphopiasccmec/main.nf @@ -2,8 +2,8 @@ 
nextflow.enable.dsl = 2 -include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [:] ) -include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [args: '--hamming'] ) +include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' +include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' workflow test_staphopiasccmec { diff --git a/tests/modules/staphopiasccmec/nextflow.config b/tests/modules/staphopiasccmec/nextflow.config new file mode 100644 index 00000000..7ee97c2f --- /dev/null +++ b/tests/modules/staphopiasccmec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAPHOPIASCCMEC_HAMMING { + ext.args = '--hamming' + } + +} diff --git a/tests/modules/staphopiasccmec/test.yml b/tests/modules/staphopiasccmec/test.yml index aadfec3e..ac3f66da 100644 --- a/tests/modules/staphopiasccmec/test.yml +++ b/tests/modules/staphopiasccmec/test.yml @@ -1,5 +1,5 @@ - name: staphopiasccmec test_staphopiasccmec - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: @@ -7,7 +7,7 @@ md5sum: e6460d4164f3af5b290c5ccdb11343bf - name: staphopiasccmec test_staphopiasccmec_hamming - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: diff --git a/tests/modules/star/align/main.nf 
b/tests/modules/star/align/main.nf index d7a7ef96..bf305d54 100644 --- a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,51 +2,77 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9']) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_ARRIBA } from 
'../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' workflow test_star_alignment_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, 
seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_fusion { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_starfusion { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = false + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - 
STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } diff --git a/tests/modules/star/align/nextflow.config b/tests/modules/star/align/nextflow.config new file mode 100644 index 00000000..751f7837 --- /dev/null +++ b/tests/modules/star/align/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 9' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat' + } + + withName: STAR_FOR_ARRIBA { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + + withName: STAR_FOR_STARFUSION { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30' + } + +} diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 47731c5c..af5bebe5 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -1,39 +1,39 
@@ - name: star align test_star_alignment_single_end - command: nextflow run tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_single_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: 
output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: b9f5e2f6a624b64c300fe25dc3ac801f @@ -43,41 +43,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 
0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: 38d08f0b944a2a1b981a250d675aa0d9 @@ -87,41 +87,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_fusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 
5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: c740d5177067c1fcc48ab7a16cd639d7 @@ -131,41 +131,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_starfusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - 
- path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: a1bd1b40950a58ea2776908076160052 diff --git a/tests/modules/star/genomegenerate/main.nf 
b/tests/modules/star/genomegenerate/main.nf index 7f9e3072..31601478 100644 --- a/tests/modules/star/genomegenerate/main.nf +++ b/tests/modules/star/genomegenerate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [publish_dir:'star'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' workflow test_star_genomegenerate { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/star/genomegenerate/nextflow.config b/tests/modules/star/genomegenerate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/star/genomegenerate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index df8d5efc..0e397009 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -1,5 +1,5 @@ - name: star genomegenerate test_star_genomegenerate - command: nextflow run tests/modules/star/genomegenerate -entry test_star_genomegenerate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/genomegenerate -entry test_star_genomegenerate -c ./tests/config/nextflow.config -c ./tests/modules/star/genomegenerate/nextflow.config tags: - star - star/genomegenerate diff --git a/tests/modules/strelka/germline/main.nf b/tests/modules/strelka/germline/main.nf index 0d5193bb..c50d76e1 100644 --- a/tests/modules/strelka/germline/main.nf +++ b/tests/modules/strelka/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' addParams( options: [:] ) +include { STRELKA_GERMLINE } from 
'../../../../modules/strelka/germline/main.nf' workflow test_strelka_germline { input = [ diff --git a/tests/modules/strelka/germline/nextflow.config b/tests/modules/strelka/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/strelka/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/germline/test.yml b/tests/modules/strelka/germline/test.yml index a3ab3ef6..8db81aa0 100644 --- a/tests/modules/strelka/germline/test.yml +++ b/tests/modules/strelka/germline/test.yml @@ -1,5 +1,5 @@ - name: strelka germline test_strelka_germline - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline @@ -10,7 +10,7 @@ - path: output/strelka/test.variants.vcf.gz.tbi - name: strelka germline test_strelka_germline_target_bed - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline diff --git a/tests/modules/strelka/somatic/main.nf b/tests/modules/strelka/somatic/main.nf index 60127f58..b1d4efeb 100644 --- a/tests/modules/strelka/somatic/main.nf +++ b/tests/modules/strelka/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' addParams( options: [:] ) +include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' workflow test_strelka_somatic { diff 
--git a/tests/modules/strelka/somatic/nextflow.config b/tests/modules/strelka/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/strelka/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/somatic/test.yml b/tests/modules/strelka/somatic/test.yml index b461d335..a56f955a 100644 --- a/tests/modules/strelka/somatic/test.yml +++ b/tests/modules/strelka/somatic/test.yml @@ -1,5 +1,5 @@ - name: strelka somatic test_strelka_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic @@ -12,7 +12,7 @@ md5sum: 4cb176febbc8c26d717a6c6e67b9c905 - name: strelka somatic test_strelka__best_practices_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic diff --git a/tests/modules/stringtie/merge/main.nf b/tests/modules/stringtie/merge/main.nf index 49ff5a41..7851e755 100644 --- a/tests/modules/stringtie/merge/main.nf +++ b/tests/modules/stringtie/merge/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) -include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' +include { STRINGTIE_MERGE } from 
'../../../../modules/stringtie/merge/main.nf' /* * Test with forward strandedness diff --git a/tests/modules/stringtie/merge/nextflow.config b/tests/modules/stringtie/merge/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stringtie/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index e6436612..392a1d7c 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -1,5 +1,5 @@ - name: stringtie merge forward-strand - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge @@ -24,7 +24,7 @@ md5sum: 0e42709bfe30c2c7f2574ba664f5fa9f - name: stringtie merge test_stringtie_reverse_merge - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge diff --git a/tests/modules/stringtie/stringtie/main.nf b/tests/modules/stringtie/stringtie/main.nf index b902cc41..ae6abe67 100644 --- a/tests/modules/stringtie/stringtie/main.nf +++ b/tests/modules/stringtie/stringtie/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' // // Test with forward strandedness // diff --git 
a/tests/modules/stringtie/stringtie/nextflow.config b/tests/modules/stringtie/stringtie/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stringtie/stringtie/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/stringtie/test.yml b/tests/modules/stringtie/stringtie/test.yml index 28c1b3c2..732b9fd1 100644 --- a/tests/modules/stringtie/stringtie/test.yml +++ b/tests/modules/stringtie/stringtie/test.yml @@ -1,5 +1,5 @@ - name: stringtie stringtie forward - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie @@ -21,7 +21,7 @@ md5sum: e981c0038295ae54b63cedb1083f1540 - name: stringtie stringtie reverse - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie diff --git a/tests/modules/subread/featurecounts/main.nf b/tests/modules/subread/featurecounts/main.nf index eae60f80..a8fa5c75 100644 --- a/tests/modules/subread/featurecounts/main.nf +++ b/tests/modules/subread/featurecounts/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' addParams( options: [args:'-t CDS'] ) +include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' workflow test_subread_featurecounts_forward { diff --git 
a/tests/modules/subread/featurecounts/nextflow.config b/tests/modules/subread/featurecounts/nextflow.config new file mode 100644 index 00000000..d9fd4fd5 --- /dev/null +++ b/tests/modules/subread/featurecounts/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SUBREAD_FEATURECOUNTS { + ext.args = '-t CDS' + } + +} diff --git a/tests/modules/subread/featurecounts/test.yml b/tests/modules/subread/featurecounts/test.yml index be6bed47..7cc24457 100644 --- a/tests/modules/subread/featurecounts/test.yml +++ b/tests/modules/subread/featurecounts/test.yml @@ -1,5 +1,5 @@ - name: subread featurecounts test_subread_featurecounts_forward - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -10,7 +10,7 @@ md5sum: 8f602ff9a8ef467af43294e80b367cdf - name: subread featurecounts test_subread_featurecounts_reverse - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -21,7 +21,7 @@ md5sum: 7cfa30ad678b9bc1bc63afbb0281547b - name: subread featurecounts test_subread_featurecounts_unstranded - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c 
./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts diff --git a/tests/modules/tabix/bgzip/main.nf b/tests/modules/tabix/bgzip/main.nf index 8756b17d..4d349890 100644 --- a/tests/modules/tabix/bgzip/main.nf +++ b/tests/modules/tabix/bgzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' addParams( options: [:] ) +include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' workflow test_tabix_bgzip { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/tabix/bgzip/nextflow.config b/tests/modules/tabix/bgzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tabix/bgzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tabix/bgzip/test.yml b/tests/modules/tabix/bgzip/test.yml index 58412979..19357655 100644 --- a/tests/modules/tabix/bgzip/test.yml +++ b/tests/modules/tabix/bgzip/test.yml @@ -1,5 +1,5 @@ - name: tabix bgzip - command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config tags: - tabix - tabix/bgzip diff --git a/tests/modules/tabix/bgziptabix/main.nf b/tests/modules/tabix/bgziptabix/main.nf index 51e242fd..b2ff70d0 100644 --- a/tests/modules/tabix/bgziptabix/main.nf +++ b/tests/modules/tabix/bgziptabix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' addParams( options: ['args2': '-p vcf'] ) +include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' workflow test_tabix_bgziptabix { input = [ [ id:'test' ], // meta 
map diff --git a/tests/modules/tabix/bgziptabix/nextflow.config b/tests/modules/tabix/bgziptabix/nextflow.config new file mode 100644 index 00000000..041bfa6a --- /dev/null +++ b/tests/modules/tabix/bgziptabix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BGZIPTABIX { + ext.args2 = '-p vcf' + } + +} diff --git a/tests/modules/tabix/bgziptabix/test.yml b/tests/modules/tabix/bgziptabix/test.yml index 31048109..1bcfa88a 100644 --- a/tests/modules/tabix/bgziptabix/test.yml +++ b/tests/modules/tabix/bgziptabix/test.yml @@ -1,5 +1,5 @@ - name: tabix bgziptabix - command: nextflow run ./tests/modules/tabix/bgziptabix -entry test_tabix_bgziptabix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgziptabix -entry test_tabix_bgziptabix -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgziptabix/nextflow.config tags: - tabix - tabix/bgziptabix diff --git a/tests/modules/tabix/tabix/main.nf b/tests/modules/tabix/tabix/main.nf index 0963ffcd..993ee812 100644 --- a/tests/modules/tabix/tabix/main.nf +++ b/tests/modules/tabix/tabix/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p bed'] ) -include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p gff'] ) -include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p vcf'] ) +include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' workflow test_tabix_tabix_bed { input = [ [ id:'B.bed' ], // meta map diff --git a/tests/modules/tabix/tabix/nextflow.config 
b/tests/modules/tabix/tabix/nextflow.config new file mode 100644 index 00000000..aa97a873 --- /dev/null +++ b/tests/modules/tabix/tabix/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BED { + ext.args = '-p bed' + } + + withName: TABIX_GFF { + ext.args = '-p gff' + } + + withName: TABIX_VCF { + ext.args = '-p vcf' + } + +} diff --git a/tests/modules/tabix/tabix/test.yml b/tests/modules/tabix/tabix/test.yml index 646215c8..46be28dd 100644 --- a/tests/modules/tabix/tabix/test.yml +++ b/tests/modules/tabix/tabix/test.yml @@ -1,5 +1,5 @@ - name: tabix tabix bed - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -7,7 +7,7 @@ - path: ./output/tabix/test.bed.gz.tbi md5sum: 5b40851ab6b8ccf7946313c86481c0df - name: tabix tabix gff - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -15,7 +15,7 @@ - path: ./output/tabix/genome.gff3.gz.tbi md5sum: f79a67d95a98076e04fbe0455d825926 - name: tabix tabix vcf - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix diff --git a/tests/modules/tbprofiler/profile/main.nf b/tests/modules/tbprofiler/profile/main.nf index e0c6ef56..0141a77f 100644 --- 
a/tests/modules/tbprofiler/profile/main.nf +++ b/tests/modules/tbprofiler/profile/main.nf @@ -2,23 +2,27 @@ nextflow.enable.dsl = 2 -include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_ILLUMINA } from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform illumina'] ) -include { TBPROFILER_PROFILE as TBPROFILER_PROFILE_NANOPORE} from '../../../../modules/tbprofiler/profile/main.nf' addParams( options: [args: '--platform nanopore'] ) +include { TBPROFILER_PROFILE } from '../../../../modules/tbprofiler/profile/main.nf' workflow test_tbprofiler_profile_illumina { - - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - TBPROFILER_PROFILE_ILLUMINA ( input ) + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + TBPROFILER_PROFILE ( input ) } - workflow test_tbprofiler_profile_nanopore { - - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - TBPROFILER_PROFILE_NANOPORE ( input ) + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + + TBPROFILER_PROFILE ( input ) } diff --git a/tests/modules/tbprofiler/profile/nextflow.config b/tests/modules/tbprofiler/profile/nextflow.config new file mode 100644 index 00000000..50cb99c6 --- /dev/null +++ b/tests/modules/tbprofiler/profile/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TBPROFILER_PROFILE_ILLUMINA { + 
ext.args = '--platform illumina' + } + + withName: TBPROFILER_PROFILE_NANOPORE { + ext.args = '--platform nanopore' + } + +} diff --git a/tests/modules/tbprofiler/profile/test.yml b/tests/modules/tbprofiler/profile/test.yml index abfb552d..8b40f1fa 100644 --- a/tests/modules/tbprofiler/profile/test.yml +++ b/tests/modules/tbprofiler/profile/test.yml @@ -1,5 +1,5 @@ - name: tbprofiler profile illumina - command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config tags: - tbprofiler - tbprofiler/profile @@ -10,7 +10,7 @@ - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz - name: tbprofiler profile nanopore - command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config tags: - tbprofiler - tbprofiler/profile diff --git a/tests/modules/tiddit/cov/main.nf b/tests/modules/tiddit/cov/main.nf index aed3516c..1bb35145 100644 --- a/tests/modules/tiddit/cov/main.nf +++ b/tests/modules/tiddit/cov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' addParams( options: [:] ) +include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' workflow test_tiddit_cov { diff --git a/tests/modules/tiddit/cov/nextflow.config b/tests/modules/tiddit/cov/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/cov/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff 
--git a/tests/modules/tiddit/cov/test.yml b/tests/modules/tiddit/cov/test.yml index c2aa6439..90c4cbb3 100644 --- a/tests/modules/tiddit/cov/test.yml +++ b/tests/modules/tiddit/cov/test.yml @@ -1,5 +1,5 @@ - name: tiddit cov test_tiddit_cov - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov @@ -8,7 +8,7 @@ md5sum: f7974948f809f94879d8a60b726194f5 - name: tiddit cov test_tiddit_cov_no_ref - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov diff --git a/tests/modules/tiddit/sv/main.nf b/tests/modules/tiddit/sv/main.nf index 8a5a8140..8dae4950 100644 --- a/tests/modules/tiddit/sv/main.nf +++ b/tests/modules/tiddit/sv/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' addParams( options: [:] ) +include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' workflow test_tiddit_sv { input = [ diff --git a/tests/modules/tiddit/sv/nextflow.config b/tests/modules/tiddit/sv/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/sv/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tiddit/sv/test.yml b/tests/modules/tiddit/sv/test.yml index ed19bf14..168d21c5 100644 --- a/tests/modules/tiddit/sv/test.yml +++ b/tests/modules/tiddit/sv/test.yml @@ -1,5 +1,5 @@ - name: tiddit sv - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv @@ -11,7 +11,7 @@ - path: output/tiddit/test.vcf - name: tiddit sv no ref - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv diff --git a/tests/modules/trimgalore/main.nf b/tests/modules/trimgalore/main.nf index 3001469d..adeda539 100644 --- a/tests/modules/trimgalore/main.nf +++ b/tests/modules/trimgalore/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TRIMGALORE } from '../../../modules/trimgalore/main.nf' addParams( options: [:] ) +include { TRIMGALORE } from '../../../modules/trimgalore/main.nf' // // Test with single-end data diff --git a/tests/modules/trimgalore/nextflow.config b/tests/modules/trimgalore/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/trimgalore/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/trimgalore/test.yml b/tests/modules/trimgalore/test.yml index c176f592..ecbd2b5a 100644 --- a/tests/modules/trimgalore/test.yml +++ b/tests/modules/trimgalore/test.yml @@ -1,5 +1,5 @@ - name: trimgalore single-end - command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: @@ -9,7 +9,7 @@ - path: ./output/trimgalore/test_trimmed.fq.gz - name: trimgalore paired-end - command: 
nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: diff --git a/tests/modules/ucsc/bed12tobigbed/main.nf b/tests/modules/ucsc/bed12tobigbed/main.nf index 8ed64166..7590fc0e 100644 --- a/tests/modules/ucsc/bed12tobigbed/main.nf +++ b/tests/modules/ucsc/bed12tobigbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' addParams( options: [:] ) +include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' workflow test_ucsc_bed12tobigbed { input = [ [ id: 'test' ], // meta map diff --git a/tests/modules/ucsc/bed12tobigbed/nextflow.config b/tests/modules/ucsc/bed12tobigbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bed12tobigbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bed12tobigbed/test.yml b/tests/modules/ucsc/bed12tobigbed/test.yml index e0ee6f75..6bd4262d 100644 --- a/tests/modules/ucsc/bed12tobigbed/test.yml +++ b/tests/modules/ucsc/bed12tobigbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bed12tobigbed - command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bed12tobigbed/nextflow.config tags: - ucsc/bed12tobigbed files: diff --git a/tests/modules/ucsc/bedclip/main.nf b/tests/modules/ucsc/bedclip/main.nf index 162c2eb4..8ccfd3b0 100755 --- a/tests/modules/ucsc/bedclip/main.nf +++ 
b/tests/modules/ucsc/bedclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' addParams( options: [suffix:'.clip'] ) +include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' workflow test_ucsc_bedclip { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/ucsc/bedclip/nextflow.config b/tests/modules/ucsc/bedclip/nextflow.config new file mode 100644 index 00000000..4adc3b8f --- /dev/null +++ b/tests/modules/ucsc/bedclip/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: UCSC_BEDCLIP { + ext.suffix = '.clip' + } + +} diff --git a/tests/modules/ucsc/bedclip/test.yml b/tests/modules/ucsc/bedclip/test.yml index 103795da..bcf22c71 100755 --- a/tests/modules/ucsc/bedclip/test.yml +++ b/tests/modules/ucsc/bedclip/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedclip - command: nextflow run ./tests/modules/ucsc/bedclip -entry test_ucsc_bedclip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedclip -entry test_ucsc_bedclip -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedclip/nextflow.config tags: - ucsc - ucsc/bedclip diff --git a/tests/modules/ucsc/bedgraphtobigwig/main.nf b/tests/modules/ucsc/bedgraphtobigwig/main.nf index 8d83e235..c6db7225 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/main.nf +++ b/tests/modules/ucsc/bedgraphtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' workflow test_ucsc_bedgraphtobigwig { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/ucsc/bedgraphtobigwig/nextflow.config b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bedgraphtobigwig/test.yml b/tests/modules/ucsc/bedgraphtobigwig/test.yml index 726a07ca..c00a0231 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/test.yml +++ b/tests/modules/ucsc/bedgraphtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedgraphtobigwig - command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedgraphtobigwig/nextflow.config tags: - ucsc/bedgraphtobigwig files: diff --git a/tests/modules/ucsc/bigwigaverageoverbed/main.nf b/tests/modules/ucsc/bigwigaverageoverbed/main.nf index 9bd5a5e2..3b20dc32 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/tests/modules/ucsc/bigwigaverageoverbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' addParams( options: [:] ) +include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' workflow test_ucsc_bigwigaverageoverbed { input = [ diff --git a/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bigwigaverageoverbed/test.yml b/tests/modules/ucsc/bigwigaverageoverbed/test.yml index 641e9be5..7344c944 100644 --- 
a/tests/modules/ucsc/bigwigaverageoverbed/test.yml +++ b/tests/modules/ucsc/bigwigaverageoverbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bigwigaverageoverbed test_ucsc_bigwigaverageoverbed - command: nextflow run tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bigwigaverageoverbed/nextflow.config tags: - ucsc - ucsc/bigwigaverageoverbed diff --git a/tests/modules/ucsc/liftover/main.nf b/tests/modules/ucsc/liftover/main.nf index 9670759a..168193f4 100644 --- a/tests/modules/ucsc/liftover/main.nf +++ b/tests/modules/ucsc/liftover/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' addParams( options: [:] ) +include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' workflow test_ucsc_liftover { diff --git a/tests/modules/ucsc/liftover/nextflow.config b/tests/modules/ucsc/liftover/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/liftover/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/liftover/test.yml b/tests/modules/ucsc/liftover/test.yml index 74df6512..c3016189 100644 --- a/tests/modules/ucsc/liftover/test.yml +++ b/tests/modules/ucsc/liftover/test.yml @@ -1,5 +1,5 @@ - name: ucsc liftover test_ucsc_liftover - command: nextflow run tests/modules/ucsc/liftover -entry test_ucsc_liftover -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/liftover -entry test_ucsc_liftover -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/liftover/nextflow.config tags: - ucsc - ucsc/liftover diff --git a/tests/modules/ucsc/wigtobigwig/main.nf 
b/tests/modules/ucsc/wigtobigwig/main.nf index 81296ac4..614d4150 100644 --- a/tests/modules/ucsc/wigtobigwig/main.nf +++ b/tests/modules/ucsc/wigtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' workflow test_ucsc_wigtobigwig { diff --git a/tests/modules/ucsc/wigtobigwig/nextflow.config b/tests/modules/ucsc/wigtobigwig/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/wigtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/wigtobigwig/test.yml b/tests/modules/ucsc/wigtobigwig/test.yml index 15388adb..08d4bce4 100644 --- a/tests/modules/ucsc/wigtobigwig/test.yml +++ b/tests/modules/ucsc/wigtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc wigtobigwig test_ucsc_wigtobigwig - command: nextflow run tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/wigtobigwig/nextflow.config tags: - ucsc - ucsc/wigtobigwig diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf index 1404712b..483d48fc 100644 --- a/tests/modules/ultra/pipeline/main.nf +++ b/tests/modules/ultra/pipeline/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' addParams( options: [:] ) -include { GUNZIP } from '../../../../modules/gunzip/main.nf' addParams( options: [:] ) -include { GFFREAD } from '../../../../modules/gffread/main.nf' addParams( options: [args: "--sort-alpha --keep-genes -T", suffix: "_sorted"] ) +include { ULTRA_PIPELINE } 
from '../../../../modules/ultra/pipeline/main.nf' +include { GUNZIP } from '../../../../modules/gunzip/main.nf' +include { GFFREAD } from '../../../../modules/gffread/main.nf' workflow test_ultra_pipeline { diff --git a/tests/modules/ultra/pipeline/nextflow.config b/tests/modules/ultra/pipeline/nextflow.config new file mode 100644 index 00000000..a3b88ea3 --- /dev/null +++ b/tests/modules/ultra/pipeline/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.args = '--sort-alpha --keep-genes -T' + ext.suffix = '_sorted' + } + +} diff --git a/tests/modules/ultra/pipeline/test.yml b/tests/modules/ultra/pipeline/test.yml index 7140193b..d424ba73 100644 --- a/tests/modules/ultra/pipeline/test.yml +++ b/tests/modules/ultra/pipeline/test.yml @@ -1,5 +1,5 @@ - name: ultra pipeline test_ultra_pipeline - command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ultra/pipeline -entry test_ultra_pipeline -c ./tests/config/nextflow.config -c ./tests/modules/ultra/pipeline/nextflow.config tags: - ultra - ultra/pipeline diff --git a/tests/modules/unicycler/main.nf b/tests/modules/unicycler/main.nf index 5352fc8b..861b139b 100644 --- a/tests/modules/unicycler/main.nf +++ b/tests/modules/unicycler/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNICYCLER } from '../../../modules/unicycler/main.nf' addParams( options: [:] ) +include { UNICYCLER } from '../../../modules/unicycler/main.nf' workflow test_unicycler_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/unicycler/nextflow.config b/tests/modules/unicycler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unicycler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unicycler/test.yml b/tests/modules/unicycler/test.yml index 124ac3e2..e25845aa 100644 --- a/tests/modules/unicycler/test.yml +++ b/tests/modules/unicycler/test.yml @@ -1,5 +1,5 @@ - name: unicycler test_unicycler_single_end - command: nextflow run tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: @@ -10,7 +10,7 @@ - "Assembly complete" - name: unicycler test_unicycler_paired_end - command: nextflow run tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: @@ -21,7 +21,7 @@ - "Assembly complete" - name: unicycler test_unicycler_shortreads_longreads - command: nextflow run tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: diff --git a/tests/modules/untar/main.nf b/tests/modules/untar/main.nf index b7317bd9..056e3ea7 100644 --- a/tests/modules/untar/main.nf +++ b/tests/modules/untar/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../modules/untar/main.nf' workflow test_untar { input = file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) diff --git a/tests/modules/untar/nextflow.config b/tests/modules/untar/nextflow.config new 
file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/untar/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/untar/test.yml b/tests/modules/untar/test.yml index 9f48e86c..6d0d1d12 100644 --- a/tests/modules/untar/test.yml +++ b/tests/modules/untar/test.yml @@ -1,5 +1,5 @@ - name: untar - command: nextflow run ./tests/modules/untar -entry test_untar -c tests/config/nextflow.config + command: nextflow run ./tests/modules/untar -entry test_untar -c ./tests/config/nextflow.config -c ./tests/modules/untar/nextflow.config tags: - untar files: diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf index b5b208be..520fe31e 100644 --- a/tests/modules/unzip/main.nf +++ b/tests/modules/unzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../modules/unzip/main.nf' workflow test_unzip { diff --git a/tests/modules/unzip/nextflow.config b/tests/modules/unzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unzip/test.yml b/tests/modules/unzip/test.yml index 1b0b1a97..8016b4fa 100644 --- a/tests/modules/unzip/test.yml +++ b/tests/modules/unzip/test.yml @@ -1,5 +1,5 @@ - name: unzip - command: nextflow run ./tests/modules/unzip -entry test_unzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unzip -entry test_unzip -c ./tests/config/nextflow.config -c ./tests/modules/unzip/nextflow.config tags: - unzip files: diff --git a/tests/modules/variantbam/main.nf b/tests/modules/variantbam/main.nf index 3ea09197..016a9104 100644 --- 
a/tests/modules/variantbam/main.nf +++ b/tests/modules/variantbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { VARIANTBAM } from '../../../modules/variantbam/main.nf' addParams( options: [args: '-m 1'] ) +include { VARIANTBAM } from '../../../modules/variantbam/main.nf' workflow test_variantbam { diff --git a/tests/modules/variantbam/nextflow.config b/tests/modules/variantbam/nextflow.config new file mode 100644 index 00000000..d0314010 --- /dev/null +++ b/tests/modules/variantbam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VARIANTBAM { + ext.args = '-m 1' + } + +} diff --git a/tests/modules/variantbam/test.yml b/tests/modules/variantbam/test.yml index 51b824cd..1c9550ed 100644 --- a/tests/modules/variantbam/test.yml +++ b/tests/modules/variantbam/test.yml @@ -1,5 +1,5 @@ - name: variantbam test_variantbam - command: nextflow run tests/modules/variantbam -entry test_variantbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/variantbam -entry test_variantbam -c ./tests/config/nextflow.config -c ./tests/modules/variantbam/nextflow.config tags: - variantbam files: diff --git a/tests/modules/vcftools/main.nf b/tests/modules/vcftools/main.nf index 2d4997de..21f9aa88 100644 --- a/tests/modules/vcftools/main.nf +++ b/tests/modules/vcftools/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq'] ) -include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq --exclude-bed'] ) +include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' +include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' workflow test_vcftools_vcf_base { input = [ [ id:'test' ], // meta map diff --git 
a/tests/modules/vcftools/nextflow.config b/tests/modules/vcftools/nextflow.config new file mode 100644 index 00000000..6865bbea --- /dev/null +++ b/tests/modules/vcftools/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VCFTOOLS_BASE { + ext.args = '--freq' + } + + withName: VCFTOOLS_OPTIONAL { + ext.args = '--freq --exclude-bed' + } + +} diff --git a/tests/modules/vcftools/test.yml b/tests/modules/vcftools/test.yml index 81529be2..5314ea75 100644 --- a/tests/modules/vcftools/test.yml +++ b/tests/modules/vcftools/test.yml @@ -1,5 +1,5 @@ - name: vcftools test_vcftools_vcf_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcf_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -7,7 +7,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -15,7 +15,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcf_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_optional -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcf_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -23,7 +23,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: diff --git a/tests/modules/yara/index/main.nf b/tests/modules/yara/index/main.nf index 35a86182..89eb0f7d 100644 --- a/tests/modules/yara/index/main.nf +++ b/tests/modules/yara/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: [publish_dir:'yara'] ) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' workflow test_yara_index { diff --git a/tests/modules/yara/index/nextflow.config b/tests/modules/yara/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/yara/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/yara/index/test.yml b/tests/modules/yara/index/test.yml index de6f1cf6..a8d17866 100644 --- a/tests/modules/yara/index/test.yml +++ b/tests/modules/yara/index/test.yml @@ -1,5 +1,5 @@ - name: yara index test_yara_index - command: nextflow run tests/modules/yara/index -entry test_yara_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/index -entry test_yara_index -c ./tests/config/nextflow.config -c ./tests/modules/yara/index/nextflow.config tags: - yara/index - yara diff --git a/tests/modules/yara/mapper/main.nf b/tests/modules/yara/mapper/main.nf index 9cdce40d..18800eb3 100644 --- a/tests/modules/yara/mapper/main.nf +++ b/tests/modules/yara/mapper/main.nf @@ -3,15 +3,18 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams(options: ['args': '-e 3']) -include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams(options: ['args': '-e 3']) +include { 
YARA_INDEX } from '../../../../modules/yara/index/main.nf' +include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' workflow test_yara_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) @@ -19,12 +22,15 @@ workflow test_yara_single_end { workflow test_yara_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) } diff --git a/tests/modules/yara/mapper/nextflow.config b/tests/modules/yara/mapper/nextflow.config new file mode 100644 index 00000000..a626a8fc --- /dev/null +++ b/tests/modules/yara/mapper/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: YARA_INDEX { + ext.args = '-e 3' + } + + withName: YARA_MAPPER { + ext.args = '-e 3' + } + +} diff --git a/tests/modules/yara/mapper/test.yml b/tests/modules/yara/mapper/test.yml index 51b056df..186f70b4 100644 --- a/tests/modules/yara/mapper/test.yml +++ 
b/tests/modules/yara/mapper/test.yml @@ -1,68 +1,68 @@ - name: yara mapper test_yara_single_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_single_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: output/yara/yara/yara.fasta md5sum: 
6e9fe4042a72f2345f644f239272b7e6 - name: yara mapper test_yara_paired_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test_2.mapped.bam - path: output/yara/test_1.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: 
output/yara/yara/yara.fasta md5sum: 6e9fe4042a72f2345f644f239272b7e6 diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 07c0b1b4..beea38c2 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -1,9 +1,10 @@ - name: align bowtie2 single-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_single_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - bowtie2 # - bowtie2/align @@ -42,9 +43,10 @@ - name: align bowtie2 paired-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - bowtie2 # - bowtie2/align diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index b84735e5..7dc73c80 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -1,15 +1,16 @@ - name: bam sort samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - 
samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam md5sum: 8b56bb7d26ced04112f712250d915aaa @@ -25,8 +26,9 @@ - name: bam sort samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_paired_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules # - samtools # - samtools/index diff --git a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml index d93c95a5..2b2e45d1 100644 --- a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml @@ -1,12 +1,13 @@ - name: bam stats samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.single_end.sorted.bam.flagstat md5sum: 2191911d72575a2358b08b1df64ccb53 @@ -17,12 +18,13 @@ - name: bam stats samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_paired_end -c tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools - # Modules - - samtools - - samtools/stats - - samtools/idxstats - - samtools/flagstat + - subworkflows + # - subworkflows/bam_stats_samtools + # # Modules + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: 
./output/samtools/test.paired_end.sorted.bam.flagstat md5sum: 4f7ffd1e6a5e85524d443209ac97d783 diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml index e6d80409..63cf64f8 100644 --- a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -1,8 +1,9 @@ - name: gatk_create_som_pon command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config tags: - - subworkflows/gatk_create_som_pon - - gatk4 + - subworkflows + # - subworkflows/gatk_create_som_pon + # - gatk4 # Modules # - gatk4/genomicsdbimport # - gatk4/createsomaticpanelofnormals diff --git a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml index 3c6753fb..4b335065 100644 --- a/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml +++ b/tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/test.yml @@ -1,7 +1,8 @@ - name: gatk_tumor_normal_somatic_variant_calling command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling -entry test_gatk_tumor_normal_somatic_variant_calling -c tests/config/nextflow.config tags: - - subworkflows/gatk_tumor_normal_somatic_variant_calling + - subworkflows + # - subworkflows/gatk_tumor_normal_somatic_variant_calling # Modules # - gatk4/mutect2 # - gatk4/learnreadorientationmodel diff --git a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml index 797ae936..9d2d5c10 100644 --- a/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml +++ b/tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/test.yml @@ -1,7 +1,8 @@ - name: 
gatk_tumor_only_somatic_variant_calling command: nextflow run ./tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling -entry test_gatk_tumor_only_somatic_variant_calling -c tests/config/nextflow.config tags: - - subworkflows/gatk_tumor_only_somatic_variant_calling + - subworkflows + # - subworkflows/gatk_tumor_only_somatic_variant_calling # Modules # - gatk4/mutect2 # - gatk4/getpileupsummaries diff --git a/tests/subworkflows/nf-core/sra_fastq/test.yml b/tests/subworkflows/nf-core/sra_fastq/test.yml index 6f953ccf..4b75431f 100644 --- a/tests/subworkflows/nf-core/sra_fastq/test.yml +++ b/tests/subworkflows/nf-core/sra_fastq/test.yml @@ -1,11 +1,12 @@ - name: sra fastq single-end command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry test_sra_fastq_single_end -c tests/config/nextflow.config tags: - - subworkflows/sra_fastq + - subworkflows + # - subworkflows/sra_fastq # Modules - - sratools - - sratools/prefetch - - sratools/fasterqdump + # - sratools + # - sratools/prefetch + # - sratools/fasterqdump files: - path: output/sratools/SRR13255544.fastq.gz md5sum: 1054c7b71884acdb5eed8a378f18be82 @@ -13,11 +14,12 @@ - name: sra fastq paired-end command: nextflow run ./tests/subworkflows/nf-core/sra_fastq -entry test_sra_fastq_paired_end -c tests/config/nextflow.config tags: - - subworkflows/sra_fastq + - subworkflows + # - subworkflows/sra_fastq # Modules - - sratools - - sratools/prefetch - - sratools/fasterqdump + # - sratools + # - sratools/prefetch + # - sratools/fasterqdump files: - path: output/sratools/SRR11140744_1.fastq.gz md5sum: 193809c784a4ea132ab2a253fa4f55b6 From 1f8f86b79361fb9999b56680105f439df81d718d Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Mon, 29 Nov 2021 14:32:23 +0100 Subject: [PATCH 076/101] takes gunzip out of fargene main.nf (#1090) * takes gunzip out of fargene main.nf * update definition of input * options.args -> args Co-authored-by: James A. 
Fellows Yates --- modules/fargene/main.nf | 15 ++++++--------- modules/fargene/meta.yml | 2 +- tests/modules/fargene/main.nf | 4 +++- tests/modules/fargene/test.yml | 8 +++++--- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index 5bf1c604..ac3f8338 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -35,15 +35,12 @@ process FARGENE { def args = task.ext.args ?: '' prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" """ - gzip \\ - -cdf $input \\ - > unziped.fa | - fargene \\ - $args \\ - -p $task.cpus \\ - -i unziped.fa \\ - --hmm-model $hmm_model \\ - -o $prefix + fargene \\ + $args \\ + -p $task.cpus \\ + -i $input \\ + --hmm-model $hmm_model \\ + -o $prefix cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/fargene/meta.yml b/modules/fargene/meta.yml index 98ec12bb..35e98008 100644 --- a/modules/fargene/meta.yml +++ b/modules/fargene/meta.yml @@ -23,7 +23,7 @@ input: e.g. 
[ id:'test', single_end:false ] - input: type: file - description: fasta or paired-end fastq file containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) + description: uncompressed fasta file or paired-end fastq files containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) pattern: "*.{fasta}" - hmm_model: type: string diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf index 6600015b..471862e1 100644 --- a/tests/modules/fargene/main.nf +++ b/tests/modules/fargene/main.nf @@ -2,6 +2,7 @@ nextflow.enable.dsl = 2 +include { GUNZIP } from '../../../modules/gunzip/main.nf' include { FARGENE } from '../../../modules/fargene/main.nf' workflow test_fargene { @@ -10,5 +11,6 @@ workflow test_fargene { file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) ] hmm_model = 'class_a' - FARGENE ( input, hmm_model ) + GUNZIP ( input ) + FARGENE ( GUNZIP.out.gunzip, hmm_model ) } diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml index 622e44b0..d97e2257 100644 --- a/tests/modules/fargene/test.yml +++ b/tests/modules/fargene/test.yml @@ -4,9 +4,11 @@ - fargene files: - path: output/fargene/fargene_analysis.log - - path: output/fargene/test/hmmsearchresults/unziped-class_A-hmmsearched.out + - path: output/fargene/test/hmmsearchresults/test1.contigs-class_A-hmmsearched.out - path: output/fargene/test/results_summary.txt md5sum: 690d351cfc52577263ef4cfab1c81f50 - - path: output/fargene/test/tmpdir/tmp.out - - path: output/fargene/test/tmpdir/unziped-positives.out + - path: output/fargene/test/tmpdir/test1.contigs-positives.out md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/fargene/test/tmpdir/tmp.out + - path: output/gunzip/test1.contigs.fa + md5sum: 80c4d78f2810f6d9e90fa6da9bb9c4f9 From 9d0cad583b9a71a6509b754fdf589cbfbed08961 Mon Sep 17 
00:00:00 2001 From: Harshil Patel Date: Thu, 2 Dec 2021 12:39:55 +0000 Subject: [PATCH 077/101] Change syntax from task.ext.suffix to tast.ext.prefix in all modules (#1110) * Add comment line for consistency * Remove all functions.nf * Remove include functions.nf and publishDir options * Replace options.args3 with task.ext.args3 - 3 modules * Replace options.args3 with task.ext.args3 - 17 modules * Replace {task.cpus} with task.cpus * Replace off on off off off off off off off on off on off on off off off on off off off on on off off off on on off off off off off off off on off off off off on off on on off off off on on on on off off off on off on on off on on off off on on on off on on off on off off off off on off off off on off off on off on off off off on on off on off on off off on off off off on off off off on off off off off on off off off on on on off on on off off on off on on on off on on off on on on off off off off off on on off off on off off off off off on off off on on off on on off on off off off on off off off off on on off on off off on off off on off on off off off off off off off off on on off on off off off.args with * Add def args = task.ext.args line to all modules in script section * Replace options.args with args and args_list * Initialise args2 and args3 properly * Replace container syntax * Revert container changes for cellranger/mkref * Replace getProcessName in all modules * Replace getSoftwareName in all modules * Unify modules using VERSION variable * Replae options.suffix with task.ext.suffix * Remove NF version restriction for CI * Bump NF version in README * Replace task.process.tokenize logic with task.process * Minor tweaks to unify syntax in tests main.nf * Add a separate nextflow.config for each module * Transfer remaining module options to nextflow.config * Remove addParams from tests main.nf * Remove TODO statements * Use -c to import module specific config * Bump NF version to 21.10.3 * Fix tests for artic/minion * Fix 
broken publishDir syntax * Standardise and fix obvious failing module tests * Remove kronatools to krona * Comment out tags in subworkflow test.yml * Fix failing module tests * Add consistent indentation to nextflow.config * Comment out subworklow definitions * Fix kallistobustools/ref * Fix rmarkdownnotebook * Fix jupyternotebook * Quote task.process * Add plink2/vcf to pytest_modules.yml * Remove NF_CORE_MODULES_TEST from pytest CI * Fix more tests * Move bacteroides_fragilis to prokaryotes folder * Fix cooler merge tests * Fix kallistobustools/count tests * Fix kallistobustools/ref tests * Update test_10x_1_fastq_gz file for kallistobustools/count tests * Fix bcftools/query tests * Fix delly/call tests * Fix cooler/zoomify tests * Fix csvtk/split tests * Fix gatk4/intervallisttools tests * Fix gatk4/variantfiltration * Fix pydamage/filter tests * Fix test data for unicycler * Fix gstama/collapse module * Fix leehom tests * Fix metaphlan3 tests * Fix pairtools/select tests * Update nextflow.config * Update nextflow.config * feat: update syntax * Fix arriba tests * Fix more failing tests * Update test syntax * Remove comments from tests nextflow.config * Apply suggestions from code review * Fix kallistobustools/count module * Update dumpsoftwareversions module * Update custom/dumpsoftwareversions * Add args2 to untar module * Update leftover modules * Remove last remaining addParams * Change syntax from task.ext.suffix to tast.ext.prefix * Change nextflow.config in all tests to use ext.prefix instead of ext.suffix Co-authored-by: JoseEspinosa Co-authored-by: Gregor Sturm Co-authored-by: MaxUlysse --- modules/abacas/main.nf | 2 +- modules/adapterremoval/main.nf | 2 +- modules/agrvate/main.nf | 2 +- modules/allelecounter/main.nf | 2 +- modules/arriba/main.nf | 2 +- modules/artic/guppyplex/main.nf | 2 +- modules/assemblyscan/main.nf | 2 +- modules/ataqv/ataqv/main.nf | 2 +- modules/bamaligncleaner/main.nf | 2 +- modules/bamtools/split/main.nf | 2 +- 
modules/bamutil/trimbam/main.nf | 2 +- modules/bandage/image/main.nf | 2 +- modules/bbmap/align/main.nf | 2 +- modules/bbmap/bbduk/main.nf | 2 +- modules/bbmap/bbsplit/main.nf | 2 +- modules/bcftools/consensus/main.nf | 2 +- modules/bcftools/filter/main.nf | 2 +- modules/bcftools/index/main.nf | 2 +- modules/bcftools/mpileup/main.nf | 2 +- modules/bcftools/norm/main.nf | 2 +- modules/bcftools/query/main.nf | 2 +- modules/bcftools/reheader/main.nf | 2 +- modules/bcftools/stats/main.nf | 2 +- modules/bcftools/view/main.nf | 2 +- modules/bedtools/bamtobed/main.nf | 2 +- modules/bedtools/complement/main.nf | 2 +- modules/bedtools/genomecov/main.nf | 2 +- modules/bedtools/intersect/main.nf | 2 +- modules/bedtools/makewindows/main.nf | 2 +- modules/bedtools/maskfasta/main.nf | 2 +- modules/bedtools/merge/main.nf | 2 +- modules/bedtools/slop/main.nf | 2 +- modules/bedtools/sort/main.nf | 2 +- modules/bedtools/subtract/main.nf | 2 +- modules/bismark/align/main.nf | 2 +- modules/bismark/deduplicate/main.nf | 2 +- modules/blast/blastn/main.nf | 2 +- modules/bowtie/align/main.nf | 2 +- modules/bowtie2/align/main.nf | 2 +- modules/bwa/aln/main.nf | 2 +- modules/bwa/mem/main.nf | 2 +- modules/bwa/sampe/main.nf | 2 +- modules/bwa/samse/main.nf | 2 +- modules/bwamem2/mem/main.nf | 2 +- modules/bwameth/align/main.nf | 2 +- modules/cat/fastq/main.nf | 2 +- modules/chromap/chromap/main.nf | 2 +- modules/clonalframeml/main.nf | 2 +- modules/cmseq/polymut/main.nf | 2 +- modules/cooler/cload/main.nf | 2 +- modules/cooler/dump/main.nf | 2 +- modules/cooler/merge/main.nf | 2 +- modules/cooler/zoomify/main.nf | 2 +- modules/csvtk/split/main.nf | 2 +- modules/cutadapt/main.nf | 2 +- modules/dastool/dastool/main.nf | 2 +- modules/dastool/scaffolds2bin/main.nf | 2 +- modules/deeptools/computematrix/main.nf | 2 +- modules/deeptools/plotfingerprint/main.nf | 2 +- modules/deeptools/plotheatmap/main.nf | 2 +- modules/deeptools/plotprofile/main.nf | 2 +- modules/delly/call/main.nf | 2 +- 
modules/diamond/blastp/main.nf | 2 +- modules/diamond/blastx/main.nf | 2 +- modules/dshbio/exportsegments/main.nf | 2 +- modules/dshbio/filterbed/main.nf | 2 +- modules/dshbio/filtergff3/main.nf | 2 +- modules/dshbio/splitbed/main.nf | 2 +- modules/dshbio/splitgff3/main.nf | 2 +- modules/ectyper/main.nf | 2 +- modules/emmtyper/main.nf | 2 +- modules/ensemblvep/main.nf | 2 +- modules/expansionhunter/main.nf | 2 +- modules/fastani/main.nf | 2 +- modules/fastp/main.nf | 2 +- modules/fastqc/main.nf | 2 +- modules/fastqscan/main.nf | 2 +- modules/fgbio/callmolecularconsensusreads/main.nf | 2 +- modules/fgbio/fastqtobam/main.nf | 2 +- modules/fgbio/groupreadsbyumi/main.nf | 2 +- modules/fgbio/sortbam/main.nf | 2 +- modules/filtlong/main.nf | 2 +- modules/flash/main.nf | 2 +- modules/freebayes/main.nf | 2 +- modules/gatk4/applybqsr/main.nf | 2 +- modules/gatk4/baserecalibrator/main.nf | 2 +- modules/gatk4/bedtointervallist/main.nf | 2 +- modules/gatk4/calculatecontamination/main.nf | 2 +- modules/gatk4/createsomaticpanelofnormals/main.nf | 2 +- modules/gatk4/estimatelibrarycomplexity/main.nf | 2 +- modules/gatk4/fastqtosam/main.nf | 2 +- modules/gatk4/filtermutectcalls/main.nf | 2 +- modules/gatk4/genotypegvcfs/main.nf | 2 +- modules/gatk4/getpileupsummaries/main.nf | 2 +- modules/gatk4/haplotypecaller/main.nf | 2 +- modules/gatk4/intervallisttools/main.nf | 2 +- modules/gatk4/learnreadorientationmodel/main.nf | 2 +- modules/gatk4/markduplicates/main.nf | 2 +- modules/gatk4/mergebamalignment/main.nf | 2 +- modules/gatk4/mergevcfs/main.nf | 2 +- modules/gatk4/mutect2/main.nf | 2 +- modules/gatk4/revertsam/main.nf | 2 +- modules/gatk4/samtofastq/main.nf | 2 +- modules/gatk4/splitncigarreads/main.nf | 2 +- modules/gatk4/variantfiltration/main.nf | 2 +- modules/genrich/main.nf | 2 +- modules/glnexus/main.nf | 2 +- modules/graphmap2/align/main.nf | 2 +- modules/gstama/collapse/main.nf | 2 +- modules/gstama/merge/main.nf | 2 +- modules/gunc/run/main.nf | 2 +- 
modules/hicap/main.nf | 2 +- modules/hifiasm/main.nf | 2 +- modules/hisat2/align/main.nf | 2 +- modules/hmmcopy/readcounter/main.nf | 2 +- modules/hmmer/hmmalign/main.nf | 2 +- modules/homer/annotatepeaks/main.nf | 2 +- modules/homer/findpeaks/main.nf | 2 +- modules/homer/maketagdirectory/main.nf | 2 +- modules/homer/makeucscfile/main.nf | 2 +- modules/imputeme/vcftoprs/main.nf | 2 +- modules/ismapper/main.nf | 2 +- modules/isoseq3/cluster/main.nf | 2 +- modules/isoseq3/refine/main.nf | 2 +- modules/ivar/consensus/main.nf | 2 +- modules/ivar/trim/main.nf | 2 +- modules/ivar/variants/main.nf | 2 +- modules/jupyternotebook/main.nf | 2 +- modules/kallistobustools/count/main.nf | 2 +- modules/kleborate/main.nf | 2 +- modules/kraken2/kraken2/main.nf | 2 +- modules/last/dotplot/main.nf | 2 +- modules/last/lastal/main.nf | 2 +- modules/last/lastdb/main.nf | 2 +- modules/last/mafconvert/main.nf | 2 +- modules/last/mafswap/main.nf | 2 +- modules/last/postmask/main.nf | 2 +- modules/last/split/main.nf | 2 +- modules/last/train/main.nf | 2 +- modules/lima/main.nf | 2 +- modules/lissero/main.nf | 2 +- modules/lofreq/call/main.nf | 2 +- modules/lofreq/callparallel/main.nf | 2 +- modules/lofreq/filter/main.nf | 2 +- modules/lofreq/indelqual/main.nf | 2 +- modules/macs2/callpeak/main.nf | 2 +- modules/manta/germline/main.nf | 2 +- modules/manta/somatic/main.nf | 2 +- modules/manta/tumoronly/main.nf | 2 +- modules/mapdamage2/main.nf | 2 +- modules/mash/sketch/main.nf | 2 +- modules/mashtree/main.nf | 2 +- modules/maxbin2/main.nf | 2 +- modules/medaka/main.nf | 2 +- modules/megahit/main.nf | 2 +- modules/meningotype/main.nf | 2 +- modules/metabat2/jgisummarizebamcontigdepths/main.nf | 2 +- modules/metabat2/metabat2/main.nf | 2 +- modules/metaphlan3/main.nf | 2 +- modules/methyldackel/mbias/main.nf | 2 +- modules/minia/main.nf | 2 +- modules/miniasm/main.nf | 2 +- modules/minimap2/align/main.nf | 2 +- modules/mlst/main.nf | 2 +- modules/mosdepth/main.nf | 2 +- 
modules/msisensor/scan/main.nf | 2 +- modules/mtnucratio/main.nf | 2 +- modules/mummer/main.nf | 2 +- modules/muscle/main.nf | 2 +- modules/nanolyse/main.nf | 2 +- modules/ncbigenomedownload/main.nf | 2 +- modules/ngmaster/main.nf | 2 +- modules/nucmer/main.nf | 2 +- modules/pairtools/dedup/main.nf | 2 +- modules/pairtools/flip/main.nf | 2 +- modules/pairtools/parse/main.nf | 2 +- modules/pairtools/restrict/main.nf | 2 +- modules/pairtools/select/main.nf | 2 +- modules/pairtools/sort/main.nf | 2 +- modules/pangolin/main.nf | 2 +- modules/paraclu/main.nf | 2 +- modules/pbbam/pbmerge/main.nf | 2 +- modules/pbccs/main.nf | 2 +- modules/peddy/main.nf | 2 +- modules/phantompeakqualtools/main.nf | 2 +- modules/phyloflash/main.nf | 4 ++-- modules/picard/collecthsmetrics/main.nf | 2 +- modules/picard/collectmultiplemetrics/main.nf | 2 +- modules/picard/collectwgsmetrics/main.nf | 2 +- modules/picard/filtersamreads/main.nf | 2 +- modules/picard/markduplicates/main.nf | 2 +- modules/picard/mergesamfiles/main.nf | 2 +- modules/picard/sortsam/main.nf | 2 +- modules/pirate/main.nf | 2 +- modules/plink/extract/main.nf | 2 +- modules/plink/vcf/main.nf | 2 +- modules/plink2/vcf/main.nf | 2 +- modules/pmdtools/filter/main.nf | 2 +- modules/porechop/main.nf | 2 +- modules/preseq/lcextrap/main.nf | 2 +- modules/pydamage/analyze/main.nf | 2 +- modules/pydamage/filter/main.nf | 2 +- modules/qcat/main.nf | 2 +- modules/racon/main.nf | 2 +- modules/rasusa/main.nf | 2 +- modules/rmarkdownnotebook/main.nf | 2 +- modules/roary/main.nf | 2 +- modules/rseqc/bamstat/main.nf | 2 +- modules/rseqc/inferexperiment/main.nf | 2 +- modules/rseqc/innerdistance/main.nf | 2 +- modules/rseqc/junctionannotation/main.nf | 2 +- modules/rseqc/junctionsaturation/main.nf | 2 +- modules/rseqc/readdistribution/main.nf | 2 +- modules/rseqc/readduplication/main.nf | 2 +- modules/samblaster/main.nf | 2 +- modules/samtools/ampliconclip/main.nf | 2 +- modules/samtools/bam2fq/main.nf | 2 +- 
modules/samtools/depth/main.nf | 2 +- modules/samtools/fastq/main.nf | 2 +- modules/samtools/fixmate/main.nf | 2 +- modules/samtools/mpileup/main.nf | 2 +- modules/samtools/sort/main.nf | 2 +- modules/samtools/view/main.nf | 2 +- modules/scoary/main.nf | 2 +- modules/seacr/callpeak/main.nf | 2 +- modules/seqsero2/main.nf | 2 +- modules/seqtk/mergepe/main.nf | 2 +- modules/seqtk/sample/main.nf | 2 +- modules/sequenzautils/bam2seqz/main.nf | 2 +- modules/sequenzautils/gcwiggle/main.nf | 2 +- modules/seqwish/induce/main.nf | 2 +- modules/snpdists/main.nf | 2 +- modules/snpeff/main.nf | 2 +- modules/sortmerna/main.nf | 2 +- modules/spades/main.nf | 2 +- modules/spatyper/main.nf | 2 +- modules/staphopiasccmec/main.nf | 2 +- modules/star/align/main.nf | 2 +- modules/strelka/germline/main.nf | 2 +- modules/strelka/somatic/main.nf | 2 +- modules/stringtie/stringtie/main.nf | 2 +- modules/subread/featurecounts/main.nf | 2 +- modules/tabix/bgzip/main.nf | 2 +- modules/tabix/bgziptabix/main.nf | 2 +- modules/tiddit/cov/main.nf | 2 +- modules/tiddit/sv/main.nf | 2 +- modules/trimgalore/main.nf | 2 +- modules/ucsc/bed12tobigbed/main.nf | 2 +- modules/ucsc/bedclip/main.nf | 2 +- modules/ucsc/bedgraphtobigwig/main.nf | 2 +- modules/ucsc/bigwigaverageoverbed/main.nf | 2 +- modules/ucsc/liftover/main.nf | 2 +- modules/ultra/pipeline/main.nf | 2 +- modules/umitools/dedup/main.nf | 2 +- modules/umitools/extract/main.nf | 2 +- modules/unicycler/main.nf | 2 +- modules/variantbam/main.nf | 2 +- modules/vcftools/main.nf | 2 +- modules/yara/mapper/main.nf | 2 +- tests/modules/bbmap/bbduk/nextflow.config | 2 +- tests/modules/bcftools/reheader/nextflow.config | 2 +- tests/modules/bedtools/complement/nextflow.config | 2 +- tests/modules/bedtools/genomecov/nextflow.config | 2 +- tests/modules/bedtools/intersect/nextflow.config | 2 +- tests/modules/bedtools/merge/nextflow.config | 2 +- tests/modules/bedtools/slop/nextflow.config | 2 +- tests/modules/bedtools/sort/nextflow.config | 2 +- 
tests/modules/diamond/blastp/nextflow.config | 2 +- tests/modules/diamond/blastx/nextflow.config | 2 +- tests/modules/dshbio/filterbed/nextflow.config | 2 +- tests/modules/dshbio/filtergff3/nextflow.config | 2 +- tests/modules/dshbio/splitbed/nextflow.config | 2 +- tests/modules/dshbio/splitgff3/nextflow.config | 2 +- .../modules/fgbio/callmolecularconsensusreads/nextflow.config | 4 ++-- .../modules/gatk4/createsomaticpanelofnormals/nextflow.config | 2 +- tests/modules/gatk4/filtermutectcalls/nextflow.config | 2 +- tests/modules/gatk4/genotypegvcfs/nextflow.config | 2 +- tests/modules/gatk4/learnreadorientationmodel/nextflow.config | 2 +- tests/modules/gatk4/variantfiltration/nextflow.config | 2 +- tests/modules/gffread/nextflow.config | 2 +- tests/modules/gstama/collapse/nextflow.config | 2 +- tests/modules/gstama/merge/nextflow.config | 2 +- tests/modules/isoseq3/refine/nextflow.config | 2 +- tests/modules/last/postmask/nextflow.config | 2 +- tests/modules/last/split/nextflow.config | 2 +- tests/modules/lima/nextflow.config | 2 +- tests/modules/lofreq/indelqual/nextflow.config | 2 +- tests/modules/medaka/nextflow.config | 2 +- tests/modules/metaphlan3/nextflow.config | 2 +- tests/modules/miniasm/nextflow.config | 2 +- tests/modules/nanolyse/nextflow.config | 2 +- tests/modules/pairtools/dedup/nextflow.config | 2 +- tests/modules/pairtools/parse/nextflow.config | 2 +- tests/modules/pairtools/restrict/nextflow.config | 2 +- tests/modules/pairtools/sort/nextflow.config | 2 +- tests/modules/pbbam/pbmerge/nextflow.config | 2 +- tests/modules/picard/filtersamreads/nextflow.config | 4 ++-- tests/modules/picard/sortsam/nextflow.config | 2 +- tests/modules/plink/extract/nextflow.config | 2 +- tests/modules/porechop/nextflow.config | 2 +- tests/modules/rasusa/nextflow.config | 2 +- tests/modules/samblaster/nextflow.config | 2 +- tests/modules/samtools/merge/nextflow.config | 2 +- tests/modules/samtools/sort/nextflow.config | 2 +- tests/modules/seqtk/mergepe/nextflow.config 
| 2 +- tests/modules/seqtk/sample/nextflow.config | 2 +- tests/modules/seqtk/subseq/nextflow.config | 2 +- tests/modules/ucsc/bedclip/nextflow.config | 2 +- tests/modules/ultra/pipeline/nextflow.config | 2 +- 309 files changed, 312 insertions(+), 312 deletions(-) diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index 7fe71e3a..49040214 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -17,7 +17,7 @@ process ABACAS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ abacas.pl \\ -r $fasta \\ diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 33955ed2..0cf257ff 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -17,7 +17,7 @@ process ADAPTERREMOVAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index 06392e16..aff72abc 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -17,7 +17,7 @@ process AGRVATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ agrvate \\ $args \\ diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 5cbc4cbd..850a018f 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -18,7 +18,7 @@ process ALLELECOUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference_options = fasta ? 
"-r $fasta": "" """ diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 459ff100..0fcb6ba7 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -19,7 +19,7 @@ process ARRIBA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def blacklist = (args.contains('-b')) ? '' : '-f blacklist' """ arriba \\ diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index a69e5381..780f5111 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -16,7 +16,7 @@ process ARTIC_GUPPYPLEX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ artic \\ guppyplex \\ diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf index 7b5b752b..56541222 100644 --- a/modules/assemblyscan/main.nf +++ b/modules/assemblyscan/main.nf @@ -16,7 +16,7 @@ process ASSEMBLYSCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ assembly-scan $assembly > ${prefix}.json diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf index 39602d30..20525e85 100644 --- a/modules/ataqv/ataqv/main.nf +++ b/modules/ataqv/ataqv/main.nf @@ -21,7 +21,7 @@ process ATAQV_ATAQV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def peak = peak_file ? "--peak-file $peak_file" : '' def tss = tss_file ? "--tss-file $tss_file" : '' def excl_regs = excl_regs_file ? 
"--excluded-region-file $excl_regs_file" : '' diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index f1481c39..88fe21aa 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -16,7 +16,7 @@ process BAMALIGNCLEANER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bamAlignCleaner \\ diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 676aab6f..8d5e5690 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -16,7 +16,7 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bamtools \\ split \\ diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf index a210fe5f..9ceb2b65 100644 --- a/modules/bamutil/trimbam/main.nf +++ b/modules/bamutil/trimbam/main.nf @@ -16,7 +16,7 @@ process BAMUTIL_TRIMBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bam \\ trimBam \\ diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index e31566d1..bc2a9495 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -17,7 +17,7 @@ process BANDAGE_IMAGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ Bandage image $gfa ${prefix}.png $args Bandage image $gfa ${prefix}.svg $args diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index ef23fada..ac839497 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -18,7 +18,7 @@ process BBMAP_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" input = meta.single_end ? "in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index 98a21eab..79c3c306 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -18,7 +18,7 @@ process BBMAP_BBDUK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? "out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" def contaminants_fa = contaminants ? "ref=$contaminants" : '' diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index 53f6b1aa..0c916dfe 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -22,7 +22,7 @@ process BBMAP_BBSPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 9b9384a6..4633790e 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_CONSENSUS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cat $fasta | bcftools consensus $vcf $args > ${prefix}.fa header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 87ad3183..95e0249a 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools filter \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf index 8f40c683..0cdebf31 100644 --- a/modules/bcftools/index/main.nf +++ b/modules/bcftools/index/main.nf @@ -17,7 +17,7 @@ process BCFTOOLS_INDEX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools \\ diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index 9d91193c..8a209a66 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -21,7 +21,7 @@ process BCFTOOLS_MPILEUP { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ echo "${meta.id}" > sample_name.list diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 79ab36e0..95da56db 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -17,7 +17,7 @@ process BCFTOOLS_NORM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools norm \\ --fasta-ref ${fasta} \\ diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index 1919fa76..d1098f99 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -19,7 +19,7 @@ process BCFTOOLS_QUERY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 3cbe2d8f..018431a9 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -18,7 +18,7 @@ process BCFTOOLS_REHEADER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def update_sequences = fai ? "-f $fai" : "" def new_header = header ? "-h $header" : "" """ diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index c66f4453..67e8dca7 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -16,7 +16,7 @@ process BCFTOOLS_STATS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools stats $args $vcf > ${prefix}.bcftools_stats.txt cat <<-END_VERSIONS > versions.yml diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index b2cbb580..f37c1ab9 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -19,7 +19,7 @@ process BCFTOOLS_VIEW { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index aebf7339..98d9ea2f 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_BAMTOBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ bamtobed \\ diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index df44b5bc..3146827c 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_COMPLEMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ complement \\ diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index e2a74ed3..ca491e75 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -24,7 +24,7 @@ process BEDTOOLS_GENOMECOV { args += " -bg" } - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (intervals.name =~ /\.bam/) { """ bedtools \\ diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index e01c78ac..afb0d056 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_INTERSECT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ intersect \\ diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index cb7d6561..2414393c 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_MAKEWINDOWS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def arg_input = use_bed ? "-b $regions" : "-g $regions" """ bedtools \\ diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 77be060c..7eeb4c7d 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_MASKFASTA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ maskfasta \\ diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 907f1c9b..5f1da95b 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ merge \\ diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index e5d92850..9d8633ec 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_SLOP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ slop \\ diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 15e69036..1ed95a57 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -17,7 +17,7 @@ process BEDTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ sort \\ diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index e645109d..b2efefe5 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -16,7 +16,7 @@ process BEDTOOLS_SUBTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ subtract \\ diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index 95e7cdfc..e490b48c 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -19,7 +19,7 @@ process BISMARK_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fastq = meta.single_end ? reads : "-1 ${reads[0]} -2 ${reads[1]}" """ bismark \\ diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index c95c54d1..16c624f1 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -17,7 +17,7 @@ process BISMARK_DEDUPLICATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def seqtype = meta.single_end ? 
'-s' : '-p' """ deduplicate_bismark \\ diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index d1bdcf77..3a0bafe0 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -17,7 +17,7 @@ process BLAST_BLASTN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` blastn \\ diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 12188269..b25b5e21 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -20,7 +20,7 @@ process BOWTIE_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? "$reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 11c9c20a..41c8a6bf 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -20,7 +20,7 @@ process BOWTIE2_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? "--un-gz ${prefix}.unmapped.fastq.gz" : '' """ diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index f6cdaefa..992e25de 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -17,7 +17,7 @@ process BWA_ALN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 9a04ed63..801293a8 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -18,7 +18,7 @@ process BWA_MEM { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 2abd9335..0b5ec255 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -17,7 +17,7 @@ process BWA_SAMPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 56e9127f..bee06bc8 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -17,7 +17,7 @@ process BWA_SAMSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 7c238741..81b4b8ab 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -18,7 +18,7 @@ process BWAMEM2_MEM { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? 
"-R ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 06e9da44..0bcd9bac 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -18,7 +18,7 @@ process BWAMETH_ALIGN { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index b6be93b0..c5ece83a 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -16,7 +16,7 @@ process CAT_FASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def readList = reads.collect{ it.toString() } if (meta.single_end) { if (readList.size > 1) { diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index f6686cf2..4a7f0097 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -28,7 +28,7 @@ process CHROMAP_CHROMAP { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def file_extension = args.contains("--SAM") ? 'sam' : args.contains("--TagAlign")? 'tagAlign' : args.contains("--pairs")? 'pairs' : 'bed' diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf index 60eaad12..db647a38 100644 --- a/modules/clonalframeml/main.nf +++ b/modules/clonalframeml/main.nf @@ -21,7 +21,7 @@ process CLONALFRAMEML { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ClonalFrameML \\ $newick \\ diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf index 18bb8c59..47e86f0c 100644 --- a/modules/cmseq/polymut/main.nf +++ b/modules/cmseq/polymut/main.nf @@ -18,7 +18,7 @@ process CMSEQ_POLYMUT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fasta_refid = fasta ? "-c $fasta" : "" def sortindex = bai ? "" : "--sortindex" """ diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf index ed7a41a1..d8bdc031 100644 --- a/modules/cooler/cload/main.nf +++ b/modules/cooler/cload/main.nf @@ -18,7 +18,7 @@ process COOLER_CLOAD { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def nproc = args.contains('pairix') || args.contains('tabix')? "--nproc $task.cpus" : '' """ diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 0836640e..a438acc8 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -17,7 +17,7 @@ process COOLER_DUMP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def suffix = resolution ? "::$resolution" : "" """ cooler dump \\ diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf index 0fed76c9..b1814b68 100644 --- a/modules/cooler/merge/main.nf +++ b/modules/cooler/merge/main.nf @@ -16,7 +16,7 @@ process COOLER_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cooler merge \\ $args \\ diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf index e61ca99d..226d4114 100644 --- a/modules/cooler/zoomify/main.nf +++ b/modules/cooler/zoomify/main.nf @@ -16,7 +16,7 @@ process COOLER_ZOOMIFY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ cooler zoomify \\ $args \\ diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf index 89b44154..52ab7ec7 100644 --- a/modules/csvtk/split/main.nf +++ b/modules/csvtk/split/main.nf @@ -18,7 +18,7 @@ process CSVTK_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) out_extension = out_format == "tsv" ? 'tsv' : 'csv' diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index f98113e8..89105715 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -17,7 +17,7 @@ process CUTADAPT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def trimmed = meta.single_end ? "-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ cutadapt \\ diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index b67ee993..722f6c55 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -28,7 +28,7 @@ process DASTOOL_DASTOOL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bin_list = bins instanceof List ? bins.join(",") : "$bins" def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" def db_dir = db_directory ? "--db_directory $db_directory" : "" diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf index 78a06b6e..09f800bb 100644 --- a/modules/dastool/scaffolds2bin/main.nf +++ b/modules/dastool/scaffolds2bin/main.nf @@ -17,7 +17,7 @@ process DASTOOL_SCAFFOLDS2BIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def file_extension = extension ? extension : "fasta" """ diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index e39310f4..70be934b 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -18,7 +18,7 @@ process DEEPTOOLS_COMPUTEMATRIX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ computeMatrix \\ $args \\ diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index aeb635ce..7925c9a9 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -18,7 +18,7 @@ process DEEPTOOLS_PLOTFINGERPRINT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? 
"--extendReads ${params.fragment_size}" : '' """ plotFingerprint \\ diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index f981744e..992c9058 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -17,7 +17,7 @@ process DEEPTOOLS_PLOTHEATMAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plotHeatmap \\ $args \\ diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index b32e04d3..60184fa6 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -17,7 +17,7 @@ process DEEPTOOLS_PLOTPROFILE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plotProfile \\ $args \\ diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index d4aa1adb..fc04cda7 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -19,7 +19,7 @@ process DELLY_CALL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ delly \\ call \\ diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 015be864..c7342767 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -19,7 +19,7 @@ process DIAMOND_BLASTP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index f4018aa9..bd7d1dd9 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -19,7 +19,7 @@ process DIAMOND_BLASTX { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 7cc5da22..d506a4b6 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -16,7 +16,7 @@ process DSHBIO_EXPORTSEGMENTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ export-segments \\ diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 065d8bec..6480f4a4 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -16,7 +16,7 @@ process DSHBIO_FILTERBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-bed \\ diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index c738c95a..a0bbf3af 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -16,7 +16,7 @@ process DSHBIO_FILTERGFF3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-gff3 \\ diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 60b8b7a3..8dbf1104 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -16,7 +16,7 @@ process DSHBIO_SPLITBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-bed \\ diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index 7ad2fd08..fc868a39 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -16,7 +16,7 @@ process DSHBIO_SPLITGFF3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-gff3 \\ diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf index 5f458eb9..0e040958 100644 --- a/modules/ectyper/main.nf +++ b/modules/ectyper/main.nf @@ -18,7 +18,7 @@ process ECTYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed = fasta.getName().endsWith(".gz") ? true : false def fasta_name = fasta.getName().replace(".gz", "") """ diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf index 9cf98694..70dabfb7 100644 --- a/modules/emmtyper/main.nf +++ b/modules/emmtyper/main.nf @@ -16,7 +16,7 @@ process EMMTYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ emmtyper \\ $args \\ diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 76cd9235..3182feb2 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -24,7 +24,7 @@ process ENSEMBLVEP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dir_cache = task.ext.use_cache ? "\${PWD}/${cache}" : "/.vep" """ mkdir $prefix diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 2ef00d17..4db78230 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -18,7 +18,7 @@ process EXPANSIONHUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? "male" : "female" """ ExpansionHunter \\ diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 7e3721bd..cc1c4902 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -17,7 +17,7 @@ process FASTANI { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.batch_input) { """ diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index 05eb1e98..33603842 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -24,7 +24,7 @@ process FASTP { script: def args = task.ext.args ?: '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? 
"--failed_out ${prefix}.fail.fastq.gz" : '' """ diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 673a00b8..d250eca0 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -18,7 +18,7 @@ process FASTQC { script: def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf index 768728f2..a0dcc46a 100644 --- a/modules/fastqscan/main.nf +++ b/modules/fastqscan/main.nf @@ -16,7 +16,7 @@ process FASTQSCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ zcat $reads | \\ fastq-scan \\ diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index f514b69a..3aab935b 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -16,7 +16,7 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ CallMolecularConsensusReads \\ diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf index 40713d03..126c3dd8 100644 --- a/modules/fgbio/fastqtobam/main.nf +++ b/modules/fgbio/fastqtobam/main.nf @@ -17,7 +17,7 @@ process FGBIO_FASTQTOBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir tmp diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf index b35186a5..47f000a5 100644 --- a/modules/fgbio/groupreadsbyumi/main.nf +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -18,7 +18,7 @@ process FGBIO_GROUPREADSBYUMI { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir tmp diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index c2822548..c542f3df 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -16,7 +16,7 @@ process FGBIO_SORTBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ SortBam \\ diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf index 10e147a6..bb1c1eb3 100644 --- a/modules/filtlong/main.nf +++ b/modules/filtlong/main.nf @@ -16,7 +16,7 @@ process FILTLONG { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" """ filtlong \\ diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 23bd1892..7bc38c97 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -15,7 +15,7 @@ process FLASH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ flash \\ $args \\ diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf index b9a63d02..1dd91fef 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/main.nf @@ -22,7 +22,7 @@ process FREEBAYES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? "--target ${targets}" : "" def samples_file = samples ? "--samples ${samples}" : "" diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index f93dd574..bd428d6c 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -20,7 +20,7 @@ process GATK4_APPLYBQSR { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index b422a798..9b0bf286 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -22,7 +22,7 @@ process GATK4_BASERECALIBRATOR { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def intervalsCommand = intervalsBed ? 
"-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 77819a0f..c4538034 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -17,7 +17,7 @@ process GATK4_BEDTOINTERVALLIST { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk BedToIntervalList \\ -I $bed \\ diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 93a2ee57..7c112c3c 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -18,7 +18,7 @@ process GATK4_CALCULATECONTAMINATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' """ diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 9bc8d1d0..2860e82e 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -20,7 +20,7 @@ process GATK4_CREATESOMATICPANELOFNORMALS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk \\ CreateSomaticPanelOfNormals \\ diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index b0b35e42..f636dc46 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -19,7 +19,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") def avail_mem = 3 diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index fc075735..915eb996 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -16,7 +16,7 @@ process GATK4_FASTQTOSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" """ gatk FastqToSam \\ diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 7111db37..02fa804f 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -21,7 +21,7 @@ process GATK4_FILTERMUTECTCALLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def orientationbias_options = '' if (orientationbias) { diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index ddb4a922..f0b35447 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -22,7 +22,7 @@ process GATK4_GENOTYPEGVCFS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 0894e17b..99be601f 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -19,7 +19,7 @@ process GATK4_GETPILEUPSUMMARIES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def sitesCommand = '' sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 418a2785..e00f1e58 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -23,7 +23,7 @@ process GATK4_HAPLOTYPECALLER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval_option = interval ? "-L ${interval}" : "" def dbsnp_option = dbsnp ? 
"-D ${dbsnp}" : "" def avail_mem = 3 diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 8e5b70e1..7e1a47f7 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -16,7 +16,7 @@ process GATK4_INTERVALLISTTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir ${prefix}_split diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index 5e9700e3..ac021afa 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -16,7 +16,7 @@ process GATK4_LEARNREADORIENTATIONMODEL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } """ diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 9f0b46da..a109facc 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -18,7 +18,7 @@ process GATK4_MARKDUPLICATES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") def avail_mem = 3 if (!task.memory) { diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 01effb0f..5e552cb2 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -19,7 +19,7 @@ process GATK4_MERGEBAMALIGNMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk MergeBamAlignment \\ ALIGNED=$aligned \\ diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index cbfc2e9d..cd1840c3 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -18,7 +18,7 @@ process GATK4_MERGEVCFS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of VCFs to merge def input = "" diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 662b3f0c..2cf940de 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -30,7 +30,7 @@ process GATK4_MUTECT2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def panels_command = '' def normals_command = '' diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index bca31a29..638b7705 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -16,7 +16,7 @@ process GATK4_REVERTSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk RevertSam \\ I=$bam \\ diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index aa9a6b2d..a909f540 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -16,7 +16,7 @@ process GATK4_SAMTOFASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? 
"FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" """ gatk SamToFastq \\ diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 32d36df9..65b82a35 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -19,7 +19,7 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gatk SplitNCigarReads \\ -R $fasta \\ diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index d5cc1eb3..00dc2588 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -20,7 +20,7 @@ process GATK4_VARIANTFILTRATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index dfbebd3a..d9deea3c 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -26,7 +26,7 @@ process GENRICH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def control = control_bam ? "-c $control_bam" : '' def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index e36729b2..b8afca22 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -16,7 +16,7 @@ process GLNEXUS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of GVCFs to merge def input = gvcfs.collect { it.toString() } diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index e0f2d4cd..554e585b 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -19,7 +19,7 @@ process GRAPHMAP2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ graphmap2 \\ align \\ diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index d8a64113..1c06692d 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -26,7 +26,7 @@ process GSTAMA_COLLAPSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_collapse.py \\ -s $bam \\ diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf index 4a8e829c..53ff93e4 100644 --- a/modules/gstama/merge/main.nf +++ b/modules/gstama/merge/main.nf @@ -20,7 +20,7 @@ process GSTAMA_MERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_merge.py \\ -f $filelist \\ diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf index 6ac681ad..8508c9f0 100644 --- a/modules/gunc/run/main.nf +++ b/modules/gunc/run/main.nf @@ -18,7 +18,7 @@ process GUNC_RUN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gunc \\ run \\ diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index ed1d7797..a96343f6 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -20,7 +20,7 @@ process HICAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def database_args = database_dir ? "--database_dir ${database_dir}" : "" def model_args = model_fp ? "--model_fp ${model_fp}" : "" def is_compressed = fasta.getName().endsWith(".gz") ? true : false diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 7fc857f1..208554d6 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -27,7 +27,7 @@ process HIFIASM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (use_parental_kmers) { """ hifiasm \\ diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 0c5f4134..ae888616 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -22,7 +22,7 @@ process HISAT2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 6cd776a1..6399b1a2 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -18,7 +18,7 @@ process HMMCOPY_READCOUNTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ readCounter \\ $args \\ diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index a25871e8..e6d04044 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -17,7 +17,7 @@ process HMMER_HMMALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? "gunzip -c $fasta" : "cat $fasta" """ $fastacmd | \\ diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 321dbc7c..84e0241a 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -20,7 +20,7 @@ process HOMER_ANNOTATEPEAKS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ annotatePeaks.pl \\ $peak \\ diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index a39fe753..66de06b6 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -18,7 +18,7 @@ process HOMER_FINDPEAKS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ findPeaks \\ diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 44490d50..72e2091f 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -19,7 +19,7 @@ process HOMER_MAKETAGDIRECTORY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ makeTagDirectory \\ tag_dir \\ diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index 8a0e3f37..17e86947 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -18,7 +18,7 @@ process HOMER_MAKEUCSCFILE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ makeUCSCfile \\ $tagDir \\ diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf index 0c8c1952..5fee90c2 100644 --- a/modules/imputeme/vcftoprs/main.nf +++ b/modules/imputeme/vcftoprs/main.nf @@ -16,7 +16,7 @@ process IMPUTEME_VCFTOPRS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ #!/usr/bin/env Rscript diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf index 4a33261b..a51cc01e 100644 --- a/modules/ismapper/main.nf +++ b/modules/ismapper/main.nf @@ -16,7 +16,7 @@ process ISMAPPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ismap \\ $args \\ diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index 27d5c3d8..fdd47971 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -26,7 +26,7 @@ process ISOSEQ3_CLUSTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ cluster \\ diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf index 5bde2f8f..5044cba2 100644 --- a/modules/isoseq3/refine/main.nf +++ b/modules/isoseq3/refine/main.nf @@ -21,7 +21,7 @@ process ISOSEQ3_REFINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ refine \\ diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 4a657756..58d97c8c 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -20,7 +20,7 @@ process IVAR_CONSENSUS { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" """ samtools mpileup \\ diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 35798123..4d0c70a2 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -18,7 +18,7 @@ process IVAR_TRIM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ivar trim \\ $args \\ diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index ba791307..ce4abd4d 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -20,7 +20,7 @@ process IVAR_VARIANTS { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? "tee ${prefix}.mpileup |" : "" def features = params.gff ? 
"-g $gff" : "" """ diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf index 02f1947f..e4bdf98b 100644 --- a/modules/jupyternotebook/main.nf +++ b/modules/jupyternotebook/main.nf @@ -24,7 +24,7 @@ process JUPYTERNOTEBOOK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 00ca8971..d67eba31 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -22,7 +22,7 @@ process KALLISTOBUSTOOLS_COUNT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? "-c2 $t2c" : '' """ diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index b64a0c45..998eced1 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -16,7 +16,7 @@ process KLEBORATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ kleborate \\ $args \\ diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index e5fb4b80..3c4d1caf 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -19,7 +19,7 @@ process KRAKEN2_KRAKEN2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? 
"" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? "${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index 51667378..e8857403 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -18,7 +18,7 @@ process LAST_DOTPLOT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ last-dotplot \\ $args \\ diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index 4b90a965..b5ac8bfe 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -17,7 +17,7 @@ process LAST_LASTAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def trained_params = param_file ? "-p ${param_file}" : '' """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index ff6485dc..e9895c5c 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -16,7 +16,7 @@ process LAST_LASTDB { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir lastdb lastdb \\ diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index f1a7312e..ca60e7fe 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -25,7 +25,7 @@ process LAST_MAFCONVERT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ maf-convert $args $format $maf | gzip --no-name \\ > ${prefix}.${format}.gz diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index c66e47d4..0a58b027 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -16,7 +16,7 @@ process LAST_MAFSWAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ maf-swap $args $maf | gzip --no-name > ${prefix}.swapped.maf.gz diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index e4f4390a..fb097a11 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -16,7 +16,7 @@ process LAST_POSTMASK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ last-postmask $args $maf | gzip --no-name > ${prefix}.maf.gz diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index ecc47e80..60ed135b 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -16,7 +16,7 @@ process LAST_SPLIT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ zcat < $maf | last-split $args | gzip --no-name > ${prefix}.maf.gz diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index 0a949857..471db7c1 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -17,7 +17,7 @@ process LAST_TRAIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 64f6d87d..a662a7bb 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -30,7 +30,7 @@ process LIMA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ OUT_EXT="" diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf index b5cd2b68..667697ef 100644 --- a/modules/lissero/main.nf +++ b/modules/lissero/main.nf @@ -16,7 +16,7 @@ process LISSERO { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lissero \\ $args \\ diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index 74995152..d7fd078b 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -17,7 +17,7 @@ process LOFREQ_CALL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call \\ diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index 63ae2886..764efcc5 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -18,7 +18,7 @@ process LOFREQ_CALLPARALLEL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call-parallel \\ diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 6f13ae44..34a5aef8 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -16,7 +16,7 @@ process LOFREQ_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ filter \\ diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index bf04c5d2..5e5b8f44 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -17,7 +17,7 @@ process LOFREQ_INDELQUAL { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq indelqual \\ $args \\ diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index e8bfcda0..c5c88f8e 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -22,7 +22,7 @@ process MACS2_CALLPEAK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def format = meta.single_end ? 'BAM' : 'BAMPE' def control = controlbam ? "--control $controlbam" : '' diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index 553f0be9..2a8c0acc 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -25,7 +25,7 @@ process MANTA_GERMLINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? 
"--exome --callRegions $target_bed" : "" """ configManta.py \ diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index 38d73133..1d62635b 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -27,7 +27,7 @@ process MANTA_SOMATIC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index dc72fcc4..63f7a840 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -25,7 +25,7 @@ process MANTA_TUMORONLY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf index 3673970e..e3668fda 100644 --- a/modules/mapdamage2/main.nf +++ b/modules/mapdamage2/main.nf @@ -34,7 +34,7 @@ process MAPDAMAGE2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mapDamage \\ $args \\ diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index 0c0b6e17..d93641f7 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -16,7 +16,7 @@ process MASH_SKETCH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mash \\ sketch \\ diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf index 6728e3ce..5da2f805 100644 --- a/modules/mashtree/main.nf +++ b/modules/mashtree/main.nf @@ -17,7 +17,7 @@ process MASHTREE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mashtree \\ $args \\ diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf index e13af704..4d384391 100644 --- a/modules/maxbin2/main.nf +++ b/modules/maxbin2/main.nf @@ -23,7 +23,7 @@ process MAXBIN2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def associate_files = reads ? "-reads $reads" : "-abund $abund" """ run_MaxBin.pl \\ diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf index e7a8b9cc..761b1c34 100644 --- a/modules/medaka/main.nf +++ b/modules/medaka/main.nf @@ -16,7 +16,7 @@ process MEDAKA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ medaka_consensus \\ -t $task.cpus \\ diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf index 011fa7d3..7b511883 100644 --- a/modules/megahit/main.nf +++ b/modules/megahit/main.nf @@ -21,7 +21,7 @@ process MEGAHIT { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ megahit \\ diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf index 5dde5633..c3b65b9d 100644 --- a/modules/meningotype/main.nf +++ b/modules/meningotype/main.nf @@ -16,7 +16,7 @@ process MENINGOTYPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ meningotype \\ $args \\ diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf index e35d6715..4a5869b6 100644 --- a/modules/metabat2/jgisummarizebamcontigdepths/main.nf +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -16,7 +16,7 @@ process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ export OMP_NUM_THREADS=$task.cpus diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index d158af91..2d01fdf6 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -17,7 +17,7 @@ process METABAT2_METABAT2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def decompress_depth = depth ? "gzip -d -f $depth" : "" def depth_file = depth ? "-a ${depth.baseName}" : "" """ diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 9463da6f..64965af3 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -19,7 +19,7 @@ process METAPHLAN3 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? '' : "--bowtie2out ${prefix}.bowtie2out.txt" diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index 1b4b14c4..021f76f1 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -18,7 +18,7 @@ process METHYLDACKEL_MBIAS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ MethylDackel mbias \\ $args \\ diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 8516ef6e..ceff67c5 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -18,7 +18,7 @@ process MINIA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def read_list = reads.join(",") """ echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf index 35c2e2c0..b0db6925 100644 --- a/modules/miniasm/main.nf +++ b/modules/miniasm/main.nf @@ -17,7 +17,7 @@ process MINIASM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ miniasm \\ $args \\ diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index c6c0c316..500250e9 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -17,7 +17,7 @@ process MINIMAP2_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" """ minimap2 \\ diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf index aa338420..b2983b82 100644 --- a/modules/mlst/main.nf +++ b/modules/mlst/main.nf @@ -16,7 +16,7 @@ process MLST { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mlst \\ --threads $task.cpus \\ diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index b25e6a3d..d2669b7e 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -24,7 +24,7 @@ process MOSDEPTH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def interval = window_size ? "--by ${window_size}" : "--by ${bed}" """ mosdepth \\ diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index 2419a0a1..223b4f44 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -16,7 +16,7 @@ process MSISENSOR_SCAN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ scan \\ diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf index b8663469..83d6ea2b 100644 --- a/modules/mtnucratio/main.nf +++ b/modules/mtnucratio/main.nf @@ -18,7 +18,7 @@ process MTNUCRATIO { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mtnucratio \\ diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf index f4f3bb18..39ad3e8b 100644 --- a/modules/mummer/main.nf +++ b/modules/mummer/main.nf @@ -18,7 +18,7 @@ process MUMMER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false def fasta_name_ref = ref.getName().replace(".gz", "") diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index a50f5cb3..6d549aaa 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -23,7 +23,7 @@ process MUSCLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def fasta_out = args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' def clw_out = args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' def msf_out = args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index f29eeb77..0ad0f799 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -18,7 +18,7 @@ process NANOLYSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf index 466c8d09..9897c861 100644 --- a/modules/ncbigenomedownload/main.nf +++ b/modules/ncbigenomedownload/main.nf @@ -29,7 +29,7 @@ process NCBIGENOMEDOWNLOAD { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def accessions_opt = accessions ? "-A ${accessions}" : "" """ ncbi-genome-download \\ diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf index 0884b55c..7d04031c 100644 --- a/modules/ngmaster/main.nf +++ b/modules/ngmaster/main.nf @@ -16,7 +16,7 @@ process NGMASTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ngmaster \\ $args \\ diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf index bb5dcb7d..4e296515 100644 --- a/modules/nucmer/main.nf +++ b/modules/nucmer/main.nf @@ -17,7 +17,7 @@ process NUCMER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false def is_compressed_query = query.getName().endsWith(".gz") ? true : false def fasta_name_ref = ref.getName().replace(".gz", "") diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index 5ee9dc43..fe59e155 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_DEDUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools dedup \\ $args \\ diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 452800cc..376191ce 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_FLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ flip \\ diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index 1d34d42c..7bd778c9 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -18,7 +18,7 @@ process PAIRTOOLS_PARSE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ parse \\ diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index 9fcc245c..8759f709 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_RESTRICT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ restrict \\ diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index f699afa3..a6d62ba7 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -17,7 +17,7 @@ process PAIRTOOLS_SELECT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools select \\ "$args" \\ diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 5caa5b74..d5996dd0 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -16,7 +16,7 @@ process PAIRTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ pairtools \\ diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index 99a68e09..5ee2b2e0 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -16,7 +16,7 @@ process PANGOLIN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pangolin \\ $fasta\\ diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf index a2003834..1623ea89 100644 --- a/modules/paraclu/main.nf +++ b/modules/paraclu/main.nf @@ -19,7 +19,7 @@ process PARACLU { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf index 970128cb..e0525cb1 100644 --- a/modules/pbbam/pbmerge/main.nf +++ b/modules/pbbam/pbmerge/main.nf @@ -17,7 +17,7 @@ process PBBAM_PBMERGE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pbmerge \\ -o ${prefix}.bam \\ diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 83e56d96..440fbc72 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -22,7 +22,7 @@ process PBCCS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ccs \\ $bam \\ diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf index 0a6c3384..d64c3762 100644 --- a/modules/peddy/main.nf +++ b/modules/peddy/main.nf @@ -20,7 +20,7 @@ process PEDDY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ peddy \\ $args \\ diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index f2edabc3..6fe34cc2 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -20,7 +20,7 @@ process PHANTOMPEAKQUALTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf index c507dd14..9ebc40de 100644 --- a/modules/phyloflash/main.nf +++ b/modules/phyloflash/main.nf @@ -18,7 +18,7 @@ process PHYLOFLASH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ phyloFlash.pl \\ @@ -58,7 +58,7 @@ process PHYLOFLASH { } stub: - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir ${prefix} touch ${prefix}/${prefix}.SSU.collection.fasta diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index adb82d8c..3705b8fb 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -20,7 +20,7 @@ process PICARD_COLLECTHSMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "-R $fasta" : "" def avail_mem = 3 diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index f52f5885..6b292534 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -18,7 +18,7 @@ process PICARD_COLLECTMULTIPLEMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectMultipleMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 94745d2d..eddb4604 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -17,7 +17,7 @@ process PICARD_COLLECTWGSMETRICS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 8b1d2e6b..d8de137b 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -17,7 +17,7 @@ process PICARD_FILTERSAMREADS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index d4c5886f..d3bf6938 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -18,7 +18,7 @@ process PICARD_MARKDUPLICATES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 3a2fc620..86796593 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -16,7 +16,7 @@ process PICARD_MERGESAMFILES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 if (!task.memory) { diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index b264b927..eb3caf40 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -17,7 +17,7 @@ process PICARD_SORTSAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 3bbb1d64..70de52e6 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -17,7 +17,7 @@ process PIRATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ PIRATE \\ $args \\ diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf index 34b12fca..9b8a52f3 100644 --- a/modules/plink/extract/main.nf +++ b/modules/plink/extract/main.nf @@ -18,7 +18,7 @@ process PLINK_EXTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" """ plink \\ diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index b6fd03d7..719e90d2 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -19,7 +19,7 @@ process PLINK_VCF { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plink \\ diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf index 8101f7dd..078ece1e 100644 --- a/modules/plink2/vcf/main.nf +++ b/modules/plink2/vcf/main.nf @@ -18,7 +18,7 @@ process PLINK2_VCF { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ plink2 \\ $args \\ diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf index 301f9206..0b3bcbc6 100644 --- a/modules/pmdtools/filter/main.nf +++ b/modules/pmdtools/filter/main.nf @@ -21,7 +21,7 @@ process PMDTOOLS_FILTER { def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' def split_cpus = Math.floor(task.cpus/2) - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" //threshold and header flags activate filtering function of pmdtools """ diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf index 2edc5c78..249efad9 100644 --- a/modules/porechop/main.nf +++ b/modules/porechop/main.nf @@ -16,7 +16,7 @@ process PORECHOP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ porechop \\ -i $reads \\ diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index 43f86cf8..b5bd0620 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -18,7 +18,7 @@ process PRESEQ_LCEXTRAP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' """ preseq \\ diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index c55616db..c06c592c 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -16,7 +16,7 @@ process PYDAMAGE_ANALYZE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ analyze \\ diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 2e0afac9..ab0b2115 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -16,7 +16,7 @@ process PYDAMAGE_FILTER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index 9f53f0cb..7d81952d 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -17,7 +17,7 @@ process QCAT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ ## Unzip fastq file ## qcat doesn't support zipped files yet diff --git a/modules/racon/main.nf b/modules/racon/main.nf index 5936fac0..9be5ce63 100644 --- a/modules/racon/main.nf +++ b/modules/racon/main.nf @@ -16,7 +16,7 @@ process RACON { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ racon -t "$task.cpus" \\ "${reads}" \\ diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index b43792ee..c2893d18 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -17,7 +17,7 @@ process RASUSA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? 
"--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ rasusa \\ diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf index 9a7db505..f8183216 100644 --- a/modules/rmarkdownnotebook/main.nf +++ b/modules/rmarkdownnotebook/main.nf @@ -25,7 +25,7 @@ process RMARKDOWNNOTEBOOK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params diff --git a/modules/roary/main.nf b/modules/roary/main.nf index a05973eb..edda3281 100644 --- a/modules/roary/main.nf +++ b/modules/roary/main.nf @@ -17,7 +17,7 @@ process ROARY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ roary \\ $args \\ diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index d9d3fa36..1141a13f 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -16,7 +16,7 @@ process RSEQC_BAMSTAT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bam_stat.py \\ -i $bam \\ diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index 3b879cfb..2243c43e 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -17,7 +17,7 @@ process RSEQC_INFEREXPERIMENT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ infer_experiment.py \\ -i $bam \\ diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 88bec499..425737d6 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -21,7 +21,7 @@ process RSEQC_INNERDISTANCE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (!meta.single_end) { """ inner_distance.py \\ diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index b6949641..d2562e5c 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -23,7 +23,7 @@ process RSEQC_JUNCTIONANNOTATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_annotation.py \\ -i $bam \\ diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index 58451d2e..695762b5 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -18,7 +18,7 @@ process RSEQC_JUNCTIONSATURATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_saturation.py \\ -i $bam \\ diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 74af618d..333193e3 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -17,7 +17,7 @@ process RSEQC_READDISTRIBUTION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ read_distribution.py \\ -i $bam \\ diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index 80fcb150..134f2e8d 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -19,7 +19,7 @@ process RSEQC_READDUPLICATION { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ read_duplication.py \\ -i $bam \\ diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf index 8445b0d0..c6573283 100644 --- a/modules/samblaster/main.nf +++ b/modules/samblaster/main.nf @@ -18,7 +18,7 @@ process SAMBLASTER { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def args3 = task.ext.args3 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" """ samtools view -h $args2 $bam | \\ diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 87d6ff8b..55a2f736 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -21,7 +21,7 @@ process SAMTOOLS_AMPLICONCLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? 
"-f ${prefix}.clipstats.txt" : "" """ diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 20e83a14..689eb960 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -17,7 +17,7 @@ process SAMTOOLS_BAM2FQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (split){ """ diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index f336547f..ebf029aa 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_DEPTH { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools \\ diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index bdbf53e4..212e804e 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_FASTQ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def endedness = meta.single_end ? "-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" """ diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index 180833f4..8f86c1c4 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_FIXMATE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" 
""" diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 5f6e2d49..c40f46d1 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -17,7 +17,7 @@ process SAMTOOLS_MPILEUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools mpileup \\ --fasta-ref $fasta \\ diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 623f10b6..0c2cf25e 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -16,7 +16,7 @@ process SAMTOOLS_SORT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools sort $args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 464edd09..619b84dc 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -18,7 +18,7 @@ process SAMTOOLS_VIEW { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf index 8fed0119..ca33041d 100644 --- a/modules/scoary/main.nf +++ b/modules/scoary/main.nf @@ -17,7 +17,7 @@ process SCOARY { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def newick_tree = tree ? 
"-n ${tree}" : "" """ scoary \\ diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 328e4e6c..12b9205f 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -19,7 +19,7 @@ process SEACR_CALLPEAK { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def function_switch = ctrlbedgraph ? "$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf index a8dd731e..0a7aa6ad 100644 --- a/modules/seqsero2/main.nf +++ b/modules/seqsero2/main.nf @@ -18,7 +18,7 @@ process SEQSERO2 { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ SeqSero2_package.py \\ $args \\ diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf index 954bed5c..299c9ea4 100644 --- a/modules/seqtk/mergepe/main.nf +++ b/modules/seqtk/mergepe/main.nf @@ -16,7 +16,7 @@ process SEQTK_MERGEPE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ ln -s ${reads} ${prefix}.fastq.gz diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 83a107d0..96e08fd4 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -17,7 +17,7 @@ process SEQTK_SAMPLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ seqtk \\ diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 9082d426..ce9d1962 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -18,7 +18,7 @@ process SEQUENZAUTILS_BAM2SEQZ { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ bam2seqz \\ diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index 43358c43..a6fcb559 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -16,7 +16,7 @@ process SEQUENZAUTILS_GCWIGGLE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ gc_wiggle \\ diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index fb25a96e..089f3478 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -19,7 +19,7 @@ process SEQWISH_INDUCE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ seqwish \\ --threads $task.cpus \\ diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index de79e89b..c8d61161 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -16,7 +16,7 @@ process SNPDISTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ snp-dists \\ $args \\ diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 2cd023f6..d0ec993e 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -28,7 +28,7 @@ process SNPEFF { } else { avail_mem = task.memory.giga } - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def dir_cache = task.ext.use_cache ? "-dataDir \${PWD}/${cache}" : "" """ snpEff \\ diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 83cd8092..5c0950d8 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -18,7 +18,7 @@ process SORTMERNA { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ sortmerna \\ diff --git a/modules/spades/main.nf b/modules/spades/main.nf index 4663ec55..ba690d35 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -22,7 +22,7 @@ process SPADES { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def maxmem = task.memory.toGiga() def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index d7c75ba6..e0ba8d13 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -18,7 +18,7 @@ process SPATYPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def input_args = repeats && repeat_order ? 
"-r ${repeats} -o ${repeat_order}" : "" """ spaTyper \\ diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index f33634ae..dbb61a27 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -16,7 +16,7 @@ process STAPHOPIASCCMEC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ staphopia-sccmec --assembly $fasta $args > ${prefix}.tsv diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index 46023d3e..9725496f 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -32,7 +32,7 @@ process STAR_ALIGN { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def ignore_gtf = star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" def seq_platform = seq_platform ? "'PL:$seq_platform'" : "" def seq_center = seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index e991db67..324be6df 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -23,7 +23,7 @@ process STRELKA_GERMLINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def regions = target_bed ? "--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index fa138633..a9766d01 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -23,7 +23,7 @@ process STRELKA_SOMATIC { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : "" def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 4367a84d..9d62a966 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -20,7 +20,7 @@ process STRINGTIE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 43a7f8cd..53eb279e 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -17,7 +17,7 @@ process SUBREAD_FEATURECOUNTS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-p' def strandedness = 0 diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 13f9a942..ed9362b2 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -16,7 +16,7 @@ process TABIX_BGZIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 9a633d2e..20b47a9f 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -17,7 +17,7 @@ process TABIX_BGZIPTABIX { script: def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bgzip -c $args $input > ${prefix}.gz tabix $args2 ${prefix}.gz diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index e9bb9b5d..c5a1ca0f 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -18,7 +18,7 @@ process TIDDIT_COV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--ref $fasta" : "" """ tiddit \\ diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index 83a46f82..08eecc01 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -20,7 +20,7 @@ process TIDDIT_SV { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ tiddit \\ diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 86761ad8..ee40b780 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -38,7 +38,7 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 937eabd6..742798b3 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -19,7 +19,7 @@ process UCSC_BED12TOBIGBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedToBigBed \\ $bed \\ diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index 1d46342c..dacd7260 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -19,7 +19,7 @@ process UCSC_BEDCLIP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedClip \\ $bedgraph \\ diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index e18b41bc..9ba306ab 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -19,7 +19,7 @@ process UCSC_BEDGRAPHTOBIGWIG { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ bedGraphToBigWig \\ $bedgraph \\ diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 8c6f1178..1e97c83d 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -19,7 +19,7 @@ process UCSC_BIGWIGAVERAGEOVERBED { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" // BUG: bigWigAverageOverBed cannot handle ensembl seqlevels style """ bigWigAverageOverBed \\ diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf index 1c667262..3dd9531e 100644 --- a/modules/ucsc/liftover/main.nf +++ b/modules/ucsc/liftover/main.nf @@ -20,7 +20,7 @@ process UCSC_LIFTOVER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ liftOver \\ diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf index 5df34121..f2dcb543 100644 --- a/modules/ultra/pipeline/main.nf +++ b/modules/ultra/pipeline/main.nf @@ -18,7 +18,7 @@ process ULTRA_PIPELINE { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ uLTRA \\ pipeline \\ diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 287bb8c2..ce21437d 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -16,7 +16,7 @@ process UMITOOLS_DEDUP { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "" : "--paired" """ umi_tools dedup \\ diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index 3c2402e2..fba8f054 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -17,7 +17,7 @@ process UMITOOLS_EXTRACT { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ umi_tools \\ diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 14319dc1..1ccc72a9 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -18,7 +18,7 @@ process UNICYCLER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" def long_reads = longreads ? "-l $longreads" : "" """ diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index 3d354016..11059a9a 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -18,7 +18,7 @@ process VARIANTBAM { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" """ variant \\ $bam \\ diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 62fff0cf..fbe646ca 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -83,7 +83,7 @@ process VCFTOOLS { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" def args_list = args.tokenize() def bed_arg = (args.contains('--bed')) ? "--bed ${bed}" : diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 6e7f433b..4539033d 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -17,7 +17,7 @@ process YARA_MAPPER { script: def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ yara_mapper \\ diff --git a/tests/modules/bbmap/bbduk/nextflow.config b/tests/modules/bbmap/bbduk/nextflow.config index 8940a9be..46fc33b4 100644 --- a/tests/modules/bbmap/bbduk/nextflow.config +++ b/tests/modules/bbmap/bbduk/nextflow.config @@ -4,7 +4,7 @@ process { withName: BBMAP_BBDUK { ext.args = 'trimq=10 qtrim=r' - ext.suffix = '.trim' + ext.prefix = { "${meta.id}.trim" } } } diff --git a/tests/modules/bcftools/reheader/nextflow.config b/tests/modules/bcftools/reheader/nextflow.config index a377b26d..55d2cff8 100644 --- a/tests/modules/bcftools/reheader/nextflow.config +++ b/tests/modules/bcftools/reheader/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BCFTOOLS_REHEADER { - ext.suffix = '.updated' + ext.prefix = { "${meta.id}.updated" } } } diff --git a/tests/modules/bedtools/complement/nextflow.config b/tests/modules/bedtools/complement/nextflow.config index 561fdead..cb867120 100644 --- a/tests/modules/bedtools/complement/nextflow.config +++ b/tests/modules/bedtools/complement/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_COMPLEMENT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/genomecov/nextflow.config b/tests/modules/bedtools/genomecov/nextflow.config index bc0e4aaf..6e1c03e2 100644 --- a/tests/modules/bedtools/genomecov/nextflow.config +++ b/tests/modules/bedtools/genomecov/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_GENOMECOV { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git 
a/tests/modules/bedtools/intersect/nextflow.config b/tests/modules/bedtools/intersect/nextflow.config index c7d0c826..3aa2593f 100644 --- a/tests/modules/bedtools/intersect/nextflow.config +++ b/tests/modules/bedtools/intersect/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_INTERSECT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/merge/nextflow.config b/tests/modules/bedtools/merge/nextflow.config index e7d635dd..545a523d 100644 --- a/tests/modules/bedtools/merge/nextflow.config +++ b/tests/modules/bedtools/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_MERGE { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/slop/nextflow.config b/tests/modules/bedtools/slop/nextflow.config index 5dc03727..09abb51a 100644 --- a/tests/modules/bedtools/slop/nextflow.config +++ b/tests/modules/bedtools/slop/nextflow.config @@ -4,7 +4,7 @@ process { withName: BEDTOOLS_SLOP { ext.args = '-l 15 -r 30' - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/bedtools/sort/nextflow.config b/tests/modules/bedtools/sort/nextflow.config index 6bb73232..2ecc295a 100644 --- a/tests/modules/bedtools/sort/nextflow.config +++ b/tests/modules/bedtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: BEDTOOLS_SORT { - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } } diff --git a/tests/modules/diamond/blastp/nextflow.config b/tests/modules/diamond/blastp/nextflow.config index d1222d49..5a9aacad 100644 --- a/tests/modules/diamond/blastp/nextflow.config +++ b/tests/modules/diamond/blastp/nextflow.config @@ -3,7 +3,7 @@ 
process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DIAMOND_BLASTP { - ext.suffix = '.diamond_blastp' + ext.prefix = { "${meta.id}.diamond_blastp" } } } diff --git a/tests/modules/diamond/blastx/nextflow.config b/tests/modules/diamond/blastx/nextflow.config index 83169455..25320af3 100644 --- a/tests/modules/diamond/blastx/nextflow.config +++ b/tests/modules/diamond/blastx/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DIAMOND_BLASTX { - ext.suffix = '.diamond_blastx' + ext.prefix = { "${meta.id}.diamond_blastx" } } } diff --git a/tests/modules/dshbio/filterbed/nextflow.config b/tests/modules/dshbio/filterbed/nextflow.config index 2f1e5ab9..3937a184 100644 --- a/tests/modules/dshbio/filterbed/nextflow.config +++ b/tests/modules/dshbio/filterbed/nextflow.config @@ -4,6 +4,6 @@ process { withName: DSHBIO_FILTERBED { ext.args = '--range chr1:0-1000' - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/dshbio/filtergff3/nextflow.config b/tests/modules/dshbio/filtergff3/nextflow.config index c4b75eaf..80dcd28c 100644 --- a/tests/modules/dshbio/filtergff3/nextflow.config +++ b/tests/modules/dshbio/filtergff3/nextflow.config @@ -4,7 +4,7 @@ process { withName: DSHBIO_FILTERGFF3 { ext.args = '--range MT192765.1:0-1000' - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/dshbio/splitbed/nextflow.config b/tests/modules/dshbio/splitbed/nextflow.config index 4369c509..ad9c045b 100644 --- a/tests/modules/dshbio/splitbed/nextflow.config +++ b/tests/modules/dshbio/splitbed/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DSHBIO_SPLITBED { - ext.suffix = '.' + ext.prefix = { "${meta.id}." 
} ext.args = '--records 2' } diff --git a/tests/modules/dshbio/splitgff3/nextflow.config b/tests/modules/dshbio/splitgff3/nextflow.config index e31f8e13..f6a0b921 100644 --- a/tests/modules/dshbio/splitgff3/nextflow.config +++ b/tests/modules/dshbio/splitgff3/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: DSHBIO_SPLITGFF3 { - ext.suffix = '.' + ext.prefix = { "${meta.id}." } ext.args = '--records 15' } diff --git a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config index 0a266da9..e6721ff6 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config +++ b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config @@ -4,12 +4,12 @@ process { withName: FGBIO_SORTBAM { ext.args = '-s TemplateCoordinate' - ext.suffix = '_out' + ext.prefix = { "${meta.id}_out" } } withName: FGBIO_CALLMOLECULARCONSENSUSREADS { ext.args = '-M 1' - ext.suffix = '_molreads' + ext.prefix = { "${meta.id}_molreads" } } } diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config index 6fda39ec..d73e78ad 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config +++ b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_CREATESOMATICPANELOFNORMALS { - ext.suffix = '.pon' + ext.prefix = { "${meta.id}.pon" } } } diff --git a/tests/modules/gatk4/filtermutectcalls/nextflow.config b/tests/modules/gatk4/filtermutectcalls/nextflow.config index c830fdc6..3d4148d2 100644 --- a/tests/modules/gatk4/filtermutectcalls/nextflow.config +++ b/tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_FILTERMUTECTCALLS { - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/gatk4/genotypegvcfs/nextflow.config b/tests/modules/gatk4/genotypegvcfs/nextflow.config index aaa704da..97396a74 100644 --- a/tests/modules/gatk4/genotypegvcfs/nextflow.config +++ b/tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_GENOTYPEGVCFS { - ext.suffix = '.genotyped' + ext.prefix = { "${meta.id}.genotyped" } } } diff --git a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config index 3a74623a..463e2d54 100644 --- a/tests/modules/gatk4/learnreadorientationmodel/nextflow.config +++ b/tests/modules/gatk4/learnreadorientationmodel/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GATK4_LEARNREADORIENTATIONMODEL { - ext.suffix = '.artifact-prior' + ext.prefix = { "${meta.id}.artifact-prior" } } } diff --git a/tests/modules/gatk4/variantfiltration/nextflow.config b/tests/modules/gatk4/variantfiltration/nextflow.config index ff2feb9c..4b930f28 100644 --- a/tests/modules/gatk4/variantfiltration/nextflow.config +++ b/tests/modules/gatk4/variantfiltration/nextflow.config @@ -4,7 +4,7 @@ process { withName: GATK4_VARIANTFILTRATION { ext.args = "--filter-name \'test_filter\' --filter-expression \'MQ0 > 0\'" - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config index 00c052f5..c020f934 100644 --- a/tests/modules/gffread/nextflow.config +++ b/tests/modules/gffread/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GFFREAD { - ext.suffix = '.out' + ext.prefix = { "${meta.id}.out" } } } diff --git a/tests/modules/gstama/collapse/nextflow.config b/tests/modules/gstama/collapse/nextflow.config index 0455c8b2..a68f33f2 100644 --- a/tests/modules/gstama/collapse/nextflow.config +++ b/tests/modules/gstama/collapse/nextflow.config @@ -4,7 +4,7 @@ process { withName: GSTAMA_COLLAPSE { ext.args = '-x capped -b BAM' - ext.suffix = '_tc' + ext.prefix = { "${meta.id}_tc" } } } diff --git a/tests/modules/gstama/merge/nextflow.config b/tests/modules/gstama/merge/nextflow.config index a9c63fcf..e0d7c8ef 100644 --- a/tests/modules/gstama/merge/nextflow.config +++ b/tests/modules/gstama/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GSTAMA_MERGE { - ext.suffix = '_merged' + ext.prefix = { "${meta.id}_merged" } } } diff --git a/tests/modules/isoseq3/refine/nextflow.config b/tests/modules/isoseq3/refine/nextflow.config index 88f1bdc4..6a4dea9f 100644 --- a/tests/modules/isoseq3/refine/nextflow.config +++ b/tests/modules/isoseq3/refine/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ISOSEQ3_REFINE { - ext.suffix = '.refine' + ext.prefix = { "${meta.id}.refine" } } } diff --git a/tests/modules/last/postmask/nextflow.config b/tests/modules/last/postmask/nextflow.config index dc021264..70c3f35b 100644 --- a/tests/modules/last/postmask/nextflow.config +++ b/tests/modules/last/postmask/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: LAST_POSTMASK { - ext.suffix = '.postmask' + ext.prefix = { "${meta.id}.postmask" } } } diff --git a/tests/modules/last/split/nextflow.config 
b/tests/modules/last/split/nextflow.config index 8b31ca0f..6252ec14 100644 --- a/tests/modules/last/split/nextflow.config +++ b/tests/modules/last/split/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: LAST_SPLIT { - ext.suffix = '.split' + ext.prefix = { "${meta.id}.split" } } } diff --git a/tests/modules/lima/nextflow.config b/tests/modules/lima/nextflow.config index 5091b034..8da2613f 100644 --- a/tests/modules/lima/nextflow.config +++ b/tests/modules/lima/nextflow.config @@ -4,7 +4,7 @@ process { withName: LIMA { ext.args = '--isoseq --peek-guess' - ext.suffix = '.fl' + ext.prefix = { "${meta.id}.fl" } } } diff --git a/tests/modules/lofreq/indelqual/nextflow.config b/tests/modules/lofreq/indelqual/nextflow.config index b9ad2787..c50c1363 100644 --- a/tests/modules/lofreq/indelqual/nextflow.config +++ b/tests/modules/lofreq/indelqual/nextflow.config @@ -4,7 +4,7 @@ process { withName: LOFREQ_INDELQUAL { ext.args = '--dindel' - ext.suffix = '.indelqual' + ext.prefix = { "${meta.id}.indelqual" } } } diff --git a/tests/modules/medaka/nextflow.config b/tests/modules/medaka/nextflow.config index 1f89be62..c0b1b507 100644 --- a/tests/modules/medaka/nextflow.config +++ b/tests/modules/medaka/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: MEDAKA { - ext.suffix = '.polished.genome' + ext.prefix = { "${meta.id}.polished.genome" } } } diff --git a/tests/modules/metaphlan3/nextflow.config b/tests/modules/metaphlan3/nextflow.config index 2dde2212..a47b46e0 100644 --- a/tests/modules/metaphlan3/nextflow.config +++ b/tests/modules/metaphlan3/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_VIEW { - ext.suffix = '.sam' + ext.prefix = { "${meta.id}.sam" } } 
withName: METAPHLAN3 { diff --git a/tests/modules/miniasm/nextflow.config b/tests/modules/miniasm/nextflow.config index 844a0120..23f0a8d0 100644 --- a/tests/modules/miniasm/nextflow.config +++ b/tests/modules/miniasm/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: MINIASM { - ext.suffix = '.assembly' + ext.prefix = { "${meta.id}.assembly" } } } diff --git a/tests/modules/nanolyse/nextflow.config b/tests/modules/nanolyse/nextflow.config index ede080cc..5f7b5bed 100644 --- a/tests/modules/nanolyse/nextflow.config +++ b/tests/modules/nanolyse/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: NANOLYSE { - ext.suffix = '.clean' + ext.prefix = { "${meta.id}.clean" } } } diff --git a/tests/modules/pairtools/dedup/nextflow.config b/tests/modules/pairtools/dedup/nextflow.config index 1de3348f..b47fab16 100644 --- a/tests/modules/pairtools/dedup/nextflow.config +++ b/tests/modules/pairtools/dedup/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_DEDUP { - ext.suffix = '.dedup' + ext.prefix = { "${meta.id}.dedup" } } } diff --git a/tests/modules/pairtools/parse/nextflow.config b/tests/modules/pairtools/parse/nextflow.config index 1a1182f6..a5d3ef9d 100644 --- a/tests/modules/pairtools/parse/nextflow.config +++ b/tests/modules/pairtools/parse/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_PARSE { - ext.suffix = '.raw' + ext.prefix = { "${meta.id}.raw" } } } diff --git a/tests/modules/pairtools/restrict/nextflow.config b/tests/modules/pairtools/restrict/nextflow.config index 857d7534..fa8217bc 100644 --- 
a/tests/modules/pairtools/restrict/nextflow.config +++ b/tests/modules/pairtools/restrict/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_RESTRICT { - ext.suffix = '.restrict' + ext.prefix = { "${meta.id}.restrict" } } } diff --git a/tests/modules/pairtools/sort/nextflow.config b/tests/modules/pairtools/sort/nextflow.config index 86b3d802..dfaf6053 100644 --- a/tests/modules/pairtools/sort/nextflow.config +++ b/tests/modules/pairtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PAIRTOOLS_SORT { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } } diff --git a/tests/modules/pbbam/pbmerge/nextflow.config b/tests/modules/pbbam/pbmerge/nextflow.config index c897068b..4fc270a9 100644 --- a/tests/modules/pbbam/pbmerge/nextflow.config +++ b/tests/modules/pbbam/pbmerge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PBBAM_PBMERGE { - ext.suffix = '.merged' + ext.prefix = { "${meta.id}.merged" } } } diff --git a/tests/modules/picard/filtersamreads/nextflow.config b/tests/modules/picard/filtersamreads/nextflow.config index e9ce4914..653e9633 100644 --- a/tests/modules/picard/filtersamreads/nextflow.config +++ b/tests/modules/picard/filtersamreads/nextflow.config @@ -3,11 +3,11 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PICARD_SORTSAM { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } withName: PICARD_FILTERSAMREADS { - ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/picard/sortsam/nextflow.config b/tests/modules/picard/sortsam/nextflow.config index 2c290cbe..ca572c2f 100644 --- 
a/tests/modules/picard/sortsam/nextflow.config +++ b/tests/modules/picard/sortsam/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PICARD_SORTSAM { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } } diff --git a/tests/modules/plink/extract/nextflow.config b/tests/modules/plink/extract/nextflow.config index 12668b01..6a7f6d42 100644 --- a/tests/modules/plink/extract/nextflow.config +++ b/tests/modules/plink/extract/nextflow.config @@ -7,7 +7,7 @@ process { } withName: PLINK_EXTRACT { - ext.suffix = '.extract' + ext.prefix = { "${meta.id}.extract" } } } diff --git a/tests/modules/porechop/nextflow.config b/tests/modules/porechop/nextflow.config index 3a0536b0..85eb257a 100644 --- a/tests/modules/porechop/nextflow.config +++ b/tests/modules/porechop/nextflow.config @@ -4,7 +4,7 @@ process { withName: PORECHOP { ext.args = '' - ext.suffix = '_porechop' + ext.prefix = { "${meta.id}_porechop" } } } diff --git a/tests/modules/rasusa/nextflow.config b/tests/modules/rasusa/nextflow.config index fea844ae..50c32e5c 100644 --- a/tests/modules/rasusa/nextflow.config +++ b/tests/modules/rasusa/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: RASUSA { - ext.suffix = '_100X' + ext.prefix = { "${meta.id}_100X" } } } diff --git a/tests/modules/samblaster/nextflow.config b/tests/modules/samblaster/nextflow.config index 3018088b..7ba8b23b 100644 --- a/tests/modules/samblaster/nextflow.config +++ b/tests/modules/samblaster/nextflow.config @@ -4,7 +4,7 @@ process { withName: SAMBLASTER { ext.args = '-M --addMateTags' - ext.suffix = '.processed' + ext.prefix = { "${meta.id}.processed" } } } diff --git a/tests/modules/samtools/merge/nextflow.config b/tests/modules/samtools/merge/nextflow.config index cb350bf7..4ac70fa0 100644 --- 
a/tests/modules/samtools/merge/nextflow.config +++ b/tests/modules/samtools/merge/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_MERGE { - ext.suffix = '_merged' + ext.prefix = { "${meta.id}_merged" } } } diff --git a/tests/modules/samtools/sort/nextflow.config b/tests/modules/samtools/sort/nextflow.config index 57ae6280..230bec5f 100644 --- a/tests/modules/samtools/sort/nextflow.config +++ b/tests/modules/samtools/sort/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SAMTOOLS_SORT { - ext.suffix = '.sorted' + ext.prefix = { "${meta.id}.sorted" } } } diff --git a/tests/modules/seqtk/mergepe/nextflow.config b/tests/modules/seqtk/mergepe/nextflow.config index b14e72ff..04eeef72 100644 --- a/tests/modules/seqtk/mergepe/nextflow.config +++ b/tests/modules/seqtk/mergepe/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_MERGEPE { - ext.suffix = '.processed' + ext.prefix = { "${meta.id}.processed" } } } diff --git a/tests/modules/seqtk/sample/nextflow.config b/tests/modules/seqtk/sample/nextflow.config index 3efac50d..a79ad290 100644 --- a/tests/modules/seqtk/sample/nextflow.config +++ b/tests/modules/seqtk/sample/nextflow.config @@ -4,7 +4,7 @@ process { withName: SEQTK_SAMPLE { ext.args = '-s100' - ext.suffix = '.sampled' + ext.prefix = { "${meta.id}.sampled" } } } diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config index c61c4a74..24f16bad 100644 --- a/tests/modules/seqtk/subseq/nextflow.config +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_SUBSEQ { - 
ext.suffix = '.filtered' + ext.prefix = { "${meta.id}.filtered" } } } diff --git a/tests/modules/ucsc/bedclip/nextflow.config b/tests/modules/ucsc/bedclip/nextflow.config index 4adc3b8f..46af4b0a 100644 --- a/tests/modules/ucsc/bedclip/nextflow.config +++ b/tests/modules/ucsc/bedclip/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: UCSC_BEDCLIP { - ext.suffix = '.clip' + ext.prefix = { "${meta.id}.clip" } } } diff --git a/tests/modules/ultra/pipeline/nextflow.config b/tests/modules/ultra/pipeline/nextflow.config index a3b88ea3..16ed7f9b 100644 --- a/tests/modules/ultra/pipeline/nextflow.config +++ b/tests/modules/ultra/pipeline/nextflow.config @@ -4,7 +4,7 @@ process { withName: GFFREAD { ext.args = '--sort-alpha --keep-genes -T' - ext.suffix = '_sorted' + ext.prefix = { "${meta.id}_sorted" } } } From e2ba70ed9a1d2f09aa77b0744ea50b447c35f696 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 2 Dec 2021 08:27:20 -0600 Subject: [PATCH 078/101] Add Cell Ranger mkfastq, mkgtf, and count (#979) * feat(cellranger): Add initial count module Co-authored-by: Gisela Gabernet * feat(cellranger): Add mkgtf module * test(cellranger): Fix count test with mkgtf * fix(cellranger): Generalize gtf attribute filters * chore: Add .gitignore for cellranger tar * build(cellranger): Update dockerfile https://joshtronic.com/2021/09/12/fixed-repository-debian-security-buster-updates-changed-suite-from-stable-to-oldstable/ * Apply suggestions from code review Co-authored-by: Gisela Gabernet * Apply suggestions from code review Co-authored-by: Harshil Patel * Update modules/cellranger/mkgtf/main.nf Co-authored-by: Harshil Patel * style: Capitalize README * test(cellranger): Update pytest_modules * feat(cellranger): Add initial mkfastq module * ci: Update pytest modules * refactor(cellranger): Update modules to new syntax * docs(cellranger): Update meta files There is some terrible 
copy-pasting going on. * fix(cellranger): Add args Co-authored-by: Gisela Gabernet Co-authored-by: Harshil Patel --- modules/cellranger/.gitignore | 1 + modules/cellranger/Dockerfile | 2 +- modules/cellranger/{readme.md => README.md} | 0 modules/cellranger/count/main.nf | 49 +++++++++++++++ modules/cellranger/count/meta.yml | 40 +++++++++++++ modules/cellranger/mkfastq/main.nf | 31 ++++++++++ modules/cellranger/mkfastq/meta.yml | 38 ++++++++++++ modules/cellranger/mkgtf/main.nf | 31 ++++++++++ modules/cellranger/mkgtf/meta.yml | 31 ++++++++++ modules/cellranger/mkref/meta.yml | 60 +++++++++---------- tests/config/pytest_modules.yml | 18 +++++- tests/modules/cellranger/count/main.nf | 33 ++++++++++ .../modules/cellranger/count/nextflow.config | 31 ++++++++++ tests/modules/cellranger/count/test.yml | 19 ++++++ tests/modules/cellranger/mkfastq/main.nf | 26 ++++++++ .../cellranger/mkfastq/nextflow.config | 5 ++ tests/modules/cellranger/mkfastq/test.yml | 13 ++++ tests/modules/cellranger/mkgtf/main.nf | 11 ++++ .../modules/cellranger/mkgtf/nextflow.config | 27 +++++++++ tests/modules/cellranger/mkgtf/test.yml | 8 +++ 20 files changed, 441 insertions(+), 33 deletions(-) create mode 100644 modules/cellranger/.gitignore rename modules/cellranger/{readme.md => README.md} (100%) create mode 100644 modules/cellranger/count/main.nf create mode 100644 modules/cellranger/count/meta.yml create mode 100644 modules/cellranger/mkfastq/main.nf create mode 100644 modules/cellranger/mkfastq/meta.yml create mode 100644 modules/cellranger/mkgtf/main.nf create mode 100644 modules/cellranger/mkgtf/meta.yml create mode 100644 tests/modules/cellranger/count/main.nf create mode 100644 tests/modules/cellranger/count/nextflow.config create mode 100644 tests/modules/cellranger/count/test.yml create mode 100644 tests/modules/cellranger/mkfastq/main.nf create mode 100644 tests/modules/cellranger/mkfastq/nextflow.config create mode 100644 tests/modules/cellranger/mkfastq/test.yml create mode 100644 
tests/modules/cellranger/mkgtf/main.nf create mode 100644 tests/modules/cellranger/mkgtf/nextflow.config create mode 100644 tests/modules/cellranger/mkgtf/test.yml diff --git a/modules/cellranger/.gitignore b/modules/cellranger/.gitignore new file mode 100644 index 00000000..9f8cb0f5 --- /dev/null +++ b/modules/cellranger/.gitignore @@ -0,0 +1 @@ +cellranger-*.tar.gz diff --git a/modules/cellranger/Dockerfile b/modules/cellranger/Dockerfile index aced4233..e9437bf6 100644 --- a/modules/cellranger/Dockerfile +++ b/modules/cellranger/Dockerfile @@ -4,7 +4,7 @@ LABEL authors="Gisela Gabernet " \ # Disclaimer: this container is not provided nor supported by 10x Genomics. # Install procps and clean apt cache -RUN apt-get update \ +RUN apt-get update --allow-releaseinfo-change \ && apt-get install -y procps \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* diff --git a/modules/cellranger/readme.md b/modules/cellranger/README.md similarity index 100% rename from modules/cellranger/readme.md rename to modules/cellranger/README.md diff --git a/modules/cellranger/count/main.nf b/modules/cellranger/count/main.nf new file mode 100644 index 00000000..be3f512a --- /dev/null +++ b/modules/cellranger/count/main.nf @@ -0,0 +1,49 @@ +process CELLRANGER_COUNT { + tag "$meta.gem" + label 'process_high' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." + } + container "nfcore/cellranger:6.0.2" + + input: + tuple val(meta), path(reads) + path reference + + output: + path("sample-${meta.gem}/outs/*"), emit: outs + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def sample_arg = meta.samples.unique().join(",") + def reference_name = reference.name + """ + cellranger \\ + count \\ + --id='sample-${meta.gem}' \\ + --fastqs=. 
\\ + --transcriptome=$reference_name \\ + --sample=$sample_arg \\ + --localcores=$task.cpus \\ + --localmem=${task.memory.toGiga()} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ + + stub: + """ + mkdir -p "sample-${meta.gem}/outs/" + touch sample-${meta.gem}/outs/fake_file.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/count/meta.yml b/modules/cellranger/count/meta.yml new file mode 100644 index 00000000..e4647c98 --- /dev/null +++ b/modules/cellranger/count/meta.yml @@ -0,0 +1,40 @@ +name: cellranger_count +description: Module to use Cell Ranger's pipelines analyze sequencing data produced from Chromium Single Cell Gene Expression. +keywords: + - align + - count + - reference +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. 
+ - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger +output: + - outs: + type: file + description: Files containing the outputs of Cell Ranger + pattern: "sample-${meta.gem}/outs/*" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkfastq/main.nf b/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..14d68665 --- /dev/null +++ b/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKFASTQ { + tag "mkfastq" + label 'process_medium' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." + } + container "litd/docker-cellranger:v6.1.1" // FIXME Add bcl2fastq to nf-core docker image + + input: + path bcl + path csv + + output: + path "versions.yml", emit: versions + path "*.fastq.gz" , emit: fastq + + script: + def args = task.ext.args ?: '' + """ + cellranger mkfastq --id=${bcl.getSimpleName()} \ + --run=$bcl \ + --csv=$csv + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkfastq/meta.yml b/modules/cellranger/mkfastq/meta.yml new file mode 100644 index 00000000..e288fb8c --- /dev/null +++ b/modules/cellranger/mkfastq/meta.yml @@ -0,0 +1,38 @@ +name: cellranger_mkfastq +description: Module to create fastqs needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkfastq command. 
+keywords: + - reference + - mkfastq + - fastq + - illumina + - bcl2fastq +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - bcl: + type: file + description: Base call files + pattern: "*.bcl.bgzf" + - csv: + type: file + description: Sample sheet + pattern: "*.csv" +output: + - fastq: + type: file + description: Unaligned FastQ files + pattern: "*.fastq.gz" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" + - "@RHReynolds" diff --git a/modules/cellranger/mkgtf/main.nf b/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..4db274d7 --- /dev/null +++ b/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKGTF { + tag "$gtf" + label 'process_low' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ } + container "nfcore/cellranger:6.0.2" + + input: + path gtf + + output: + path "*.filtered.gtf", emit: gtf + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + cellranger \\ + mkgtf \\ + $gtf \\ + ${gtf.baseName}.filtered.gtf \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkgtf/meta.yml b/modules/cellranger/mkgtf/meta.yml new file mode 100644 index 00000000..c160072f --- /dev/null +++ b/modules/cellranger/mkgtf/meta.yml @@ -0,0 +1,31 @@ +name: cellranger_mkgtf +description: Module to build a filtered gtf needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkgtf command. +keywords: + - reference + - mkref + - index +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - gtf: + type: file + description: + pattern: "*.gtf" +output: + - gtf: + type: folder + description: gtf transcriptome file + pattern: "*.filtered.gtf" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkref/meta.yml b/modules/cellranger/mkref/meta.yml index 9b849af7..06bf5b93 100644 --- a/modules/cellranger/mkref/meta.yml +++ b/modules/cellranger/mkref/meta.yml @@ -1,39 +1,37 @@ name: cellranger_mkref description: Module to build the reference needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkref command. keywords: - - reference - - mkref - - index + - reference + - mkref + - index tools: - - cellranger: - description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
- homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger - documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - doi: "" - licence: 10x Genomics EULA - + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA input: - - fasta: - type: file - description: fasta genome file - pattern: "*.{fasta,fa}" - - gtf: - type: file - description: gtf transcriptome file - pattern: "*.gtf" - - reference_name: - type: val - description: name to give the reference folder - pattern: str - + - fasta: + type: file + description: fasta genome file + pattern: "*.{fasta,fa}" + - gtf: + type: file + description: gtf transcriptome file + pattern: "*.gtf" + - reference_name: + type: val + description: name to give the reference folder + pattern: str output: - - versions: - type: file - description: File containing software version - pattern: "versions.yml" - - reference: - type: folder - description: Folder containing all the reference indices needed by Cell Ranger + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger + - versions: + type: file + description: File containing software version + pattern: "versions.yml" 
authors: - "@ggabernet" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index aa59b7c9..85689d8b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -270,9 +270,25 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** -cellranger/mkref: +cellranger/gtf: # &cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/mkref: # &cellranger/mkref - modules/cellranger/mkref/** - tests/modules/cellranger/mkref/** + # - *cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/count: + - modules/cellranger/count/** + - tests/modules/cellranger/count/** + # - *cellranger/mkref + - modules/cellranger/mkref/** + - tests/modules/cellranger/mkref/** + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** checkm/lineagewf: - modules/checkm/lineagewf/** diff --git a/tests/modules/cellranger/count/main.nf b/tests/modules/cellranger/count/main.nf new file mode 100644 index 00000000..bb9e11d1 --- /dev/null +++ b/tests/modules/cellranger/count/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' +include { CELLRANGER_COUNT } from '../../../../modules/cellranger/count/main.nf' + +workflow test_cellranger_count { + + input = [ [ id:'test', single_end:true, strandedness:'forward', gem: '123', samples: ["test_10x"] ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_10x_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_10x_2_fastq_gz'], checkIfExists: true) + ] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + reference_name = 
"homo_sapiens_chr22_reference" + + CELLRANGER_MKGTF ( gtf ) + + CELLRANGER_MKREF ( + fasta, + CELLRANGER_MKGTF.out.gtf, + reference_name + ) + + CELLRANGER_COUNT( + input, + CELLRANGER_MKREF.out.reference + ) +} diff --git a/tests/modules/cellranger/count/nextflow.config b/tests/modules/cellranger/count/nextflow.config new file mode 100644 index 00000000..16419fce --- /dev/null +++ b/tests/modules/cellranger/count/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + + withName: CELLRANGER_COUNT { + ext.args = '--chemistry SC3Pv3' + } + +} diff --git a/tests/modules/cellranger/count/test.yml b/tests/modules/cellranger/count/test.yml new file mode 100644 index 00000000..6b151a2a --- /dev/null +++ b/tests/modules/cellranger/count/test.yml @@ -0,0 +1,19 @@ +- name: cellranger count test_cellranger_count + command: nextflow run tests/modules/cellranger/count -entry test_cellranger_count -c tests/config/nextflow.config -c tests/modules/cellranger/count/nextflow.config + tags: + - cellranger + - cellranger/count + files: + - path: output/cellranger/sample-123/outs/filtered_feature_bc_matrix.h5 + - path: 
output/cellranger/sample-123/outs/metrics_summary.csv + md5sum: 707df0f101d479d93f412ca74f9c4131 + - path: output/cellranger/sample-123/outs/molecule_info.h5 + md5sum: cf03b2b3ca776a1c37aa3518e91268ba + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam + md5sum: 15441da9cfceea0bb48c8b66b1b860df + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam.bai + md5sum: 7c3d49c77016a09535aff61a027f750c + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix.h5 + md5sum: 40c8df814eb8723b7317b234dc8222e9 + - path: output/cellranger/sample-123/outs/web_summary.html diff --git a/tests/modules/cellranger/mkfastq/main.nf b/tests/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..5e594fd1 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { CELLRANGER_MKFASTQ } from '../../../../modules/cellranger/mkfastq/main.nf' + +workflow test_cellranger_mkfastq_simple { + + simple_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-simple-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( UNTAR.out.untar, simple_csv) +} + +workflow test_cellranger_mkfastq_illumina { + + samplesheet_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-samplesheet-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( UNTAR.out.untar, samplesheet_csv) +} diff --git a/tests/modules/cellranger/mkfastq/nextflow.config b/tests/modules/cellranger/mkfastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- 
/dev/null +++ b/tests/modules/cellranger/mkfastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkfastq/test.yml b/tests/modules/cellranger/mkfastq/test.yml new file mode 100644 index 00000000..bdd32187 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/test.yml @@ -0,0 +1,13 @@ +- name: cellranger mkfastq test_cellranger_mkfastq_simple + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_simple -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq + # files: + # - path: output/cellranger/genome.filtered.gtf + # md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b +- name: cellranger mkfastq test_cellranger_mkfastq_illumina + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_illumina -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq diff --git a/tests/modules/cellranger/mkgtf/main.nf b/tests/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..19e2cba0 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,11 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' + +workflow test_cellranger_mkgtf { + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + CELLRANGER_MKGTF ( gtf ) +} diff --git a/tests/modules/cellranger/mkgtf/nextflow.config b/tests/modules/cellranger/mkgtf/nextflow.config new file mode 100644 index 00000000..03fd9e09 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: 
CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + +} diff --git a/tests/modules/cellranger/mkgtf/test.yml b/tests/modules/cellranger/mkgtf/test.yml new file mode 100644 index 00000000..2130afd2 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/test.yml @@ -0,0 +1,8 @@ +- name: cellranger mkgtf test_cellranger_mkgtf + command: nextflow run tests/modules/cellranger/mkgtf -entry test_cellranger_mkgtf -c tests/config/nextflow.config -c tests/modules/cellranger/mkgtf/nextflow.config + tags: + - cellranger + - cellranger/mkgtf + files: + - path: output/cellranger/genome.filtered.gtf + md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b From cd94731789aa516631e5ea11e0f49469f2ba82dd Mon Sep 17 00:00:00 2001 From: tamuanand Date: Sun, 5 Dec 2021 16:45:09 -0500 Subject: [PATCH 079/101] Update meta.yml by fixing html pattern expected (#1113) Fixed html pattern typo: Before: pattern: "*.thml" After fix: pattern: "*.html" --- modules/fastp/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index 6e133871..a1875faf 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -40,7 +40,7 @@ output: - html: type: file description: Results in HTML format - pattern: "*.thml" + pattern: "*.html" - log: type: file description: fastq log file 
From f3ffa69b8dcde337dce1782c15118323c7ea14d7 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Mon, 6 Dec 2021 08:56:41 +0000 Subject: [PATCH 080/101] Dragmap (#1108) * feat(dragmap): Add initial hastable module * feat(dragmap): Add initial align module * test(dragmap): Remove md5sum Forgot sam files have a header. Might pipe this through samtools. * build(dragmap): Add mulled container * chore(dragmap): Update prefix * feat(dragmap): Output a bam file * feat(dragmap): Add log files * Update modules/dragmap/align/meta.yml Co-authored-by: Jose Espinosa-Carrasco --- modules/dragmap/align/main.nf | 59 +++++++++++++++++++ modules/dragmap/align/meta.yml | 42 +++++++++++++ modules/dragmap/hashtable/main.nf | 33 +++++++++++ modules/dragmap/hashtable/meta.yml | 30 ++++++++++ tests/config/pytest_modules.yml | 8 +++ tests/modules/dragmap/align/main.nf | 33 +++++++++++ tests/modules/dragmap/align/nextflow.config | 5 ++ tests/modules/dragmap/align/test.yml | 17 ++++++ tests/modules/dragmap/hashtable/main.nf | 15 +++++ .../modules/dragmap/hashtable/nextflow.config | 5 ++ tests/modules/dragmap/hashtable/test.yml | 19 ++++++ 11 files changed, 266 insertions(+) create mode 100644 modules/dragmap/align/main.nf create mode 100644 modules/dragmap/align/meta.yml create mode 100644 modules/dragmap/hashtable/main.nf create mode 100644 modules/dragmap/hashtable/meta.yml create mode 100644 tests/modules/dragmap/align/main.nf create mode 100644 tests/modules/dragmap/align/nextflow.config create mode 100644 tests/modules/dragmap/align/test.yml create mode 100644 tests/modules/dragmap/hashtable/main.nf create mode 100644 tests/modules/dragmap/hashtable/nextflow.config create mode 100644 tests/modules/dragmap/hashtable/test.yml diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf new file mode 100644 index 00000000..f6d6877e --- /dev/null +++ b/modules/dragmap/align/main.nf @@ -0,0 +1,59 @@ +process DRAGMAP_ALIGN { + tag "$meta.id" + label 'process_high' + + conda 
(params.enable_conda ? "bioconda::dragmap=1.2.1 bioconda::samtools=1.14 conda-forge::pigz=2.3.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0': + 'quay.io/biocontainers/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0' }" + + input: + tuple val(meta), path(reads) + path hashmap + + output: + tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path('*.log'), emit: log + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if (meta.single_end) { + """ + dragen-os \\ + -r $hashmap \\ + -1 $reads \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } else { + """ + dragen-os \\ + -r $hashmap \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } +} diff --git a/modules/dragmap/align/meta.yml b/modules/dragmap/align/meta.yml new file mode 100644 index 00000000..e943ccf8 --- /dev/null +++ b/modules/dragmap/align/meta.yml @@ -0,0 +1,42 @@ +name: dragmap_align 
+description: Performs fastq alignment to a reference using DRAGMAP +keywords: + - alignment + - map + - fastq + - bam + - sam +tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. + homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "Directory containing DRAGMAP hash table *.{cmp,.bin,.txt}" +output: + - bam: + type: file + description: Output BAM file containing read alignments + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/modules/dragmap/hashtable/main.nf b/modules/dragmap/hashtable/main.nf new file mode 100644 index 00000000..ab55364b --- /dev/null +++ b/modules/dragmap/hashtable/main.nf @@ -0,0 +1,33 @@ +process DRAGMAP_HASHTABLE { + tag "$fasta" + label 'process_high' + + conda (params.enable_conda ? "bioconda::dragmap=1.2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dragmap:1.2.1--hd4ca14e_0': + 'quay.io/biocontainers/dragmap:1.2.1--hd4ca14e_0' }" + + input: + path fasta + + output: + path "dragmap" , emit: hashmap + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + mkdir dragmap + dragen-os \\ + --build-hash-table true \\ + --ht-reference $fasta \\ + --output-directory dragmap \\ + $args \\ + --ht-num-threads $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + END_VERSIONS + """ +} diff --git a/modules/dragmap/hashtable/meta.yml b/modules/dragmap/hashtable/meta.yml new file mode 100644 index 00000000..86e58789 --- /dev/null +++ b/modules/dragmap/hashtable/meta.yml @@ -0,0 +1,30 @@ +name: dragmap_hashtable +description: Create DRAGEN hashtable for reference genome +keywords: + - index + - fasta + - genome + - reference +tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. 
+ homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - fasta: + type: file + description: Input genome fasta file +output: + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "*.{cmp,.bin,.txt}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 85689d8b..bbe89840 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -402,6 +402,14 @@ diamond/makedb: - modules/diamond/makedb/** - tests/modules/diamond/makedb/** +dragmap/align: + - modules/dragmap/align/** + - tests/modules/dragmap/align/** + +dragmap/hashtable: + - modules/dragmap/hashtable/** + - tests/modules/dragmap/hashtable/** + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** diff --git a/tests/modules/dragmap/align/main.nf b/tests/modules/dragmap/align/main.nf new file mode 100644 index 00000000..92e8c265 --- /dev/null +++ b/tests/modules/dragmap/align/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' +include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' + +workflow test_dragmap_align_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) +} + +workflow test_dragmap_align_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) +} diff --git a/tests/modules/dragmap/align/nextflow.config b/tests/modules/dragmap/align/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/dragmap/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dragmap/align/test.yml b/tests/modules/dragmap/align/test.yml new file mode 100644 index 00000000..75c5ea96 --- /dev/null +++ b/tests/modules/dragmap/align/test.yml @@ -0,0 +1,17 @@ +- name: dragmap align single-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log diff --git a/tests/modules/dragmap/hashtable/main.nf b/tests/modules/dragmap/hashtable/main.nf new file mode 100644 index 00000000..91b43caa --- /dev/null +++ b/tests/modules/dragmap/hashtable/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' + +workflow 
test_dragmap_hashtable { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) +} + +// TODO Add test using alt-masked bed file +// https://github.com/Illumina/dragmap#build-hash-table-using-an-alt-masked-bed-file diff --git a/tests/modules/dragmap/hashtable/nextflow.config b/tests/modules/dragmap/hashtable/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/dragmap/hashtable/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dragmap/hashtable/test.yml b/tests/modules/dragmap/hashtable/test.yml new file mode 100644 index 00000000..59a3ed55 --- /dev/null +++ b/tests/modules/dragmap/hashtable/test.yml @@ -0,0 +1,19 @@ +- name: dragmap hashtable + command: nextflow run ./tests/modules/dragmap/hashtable -entry test_dragmap_hashtable -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/hashtable/nextflow.config + tags: + - dragmap + - dragmap/hashtable + files: + - path: output/dragmap/dragmap/hash_table.cfg + - path: output/dragmap/dragmap/hash_table.cfg.bin + - path: output/dragmap/dragmap/hash_table.cmp + md5sum: bc210e5358fd65656f9aea297b59ec7d + - path: output/dragmap/dragmap/hash_table_stats.txt + - path: output/dragmap/dragmap/reference.bin + md5sum: b6b5c12a42416b990cd2844de8f33c5d + - path: output/dragmap/dragmap/ref_index.bin + md5sum: 8470be9566ecee77eb4aea6a38922a66 + - path: output/dragmap/dragmap/repeat_mask.bin + md5sum: 2439259a2fd32a1d0f4c53d585f3da3a + - path: output/dragmap/dragmap/str_table.bin + md5sum: 302e2b30993973527e69c6bcd1f093d0 From e0aa89141ffecb5f54d230f7ea46de242b74e084 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Mon, 6 Dec 2021 11:37:04 +0100 Subject: [PATCH 081/101] Add meta information to samtools/faidx (#1114) * add meta to samtools/faidx --- 
modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/faidx/meta.yml | 10 ++++++++++ tests/modules/samtools/faidx/main.nf | 6 ++++-- tests/modules/samtools/faidx/test.yml | 6 ++++-- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index c53373a9..d8308b03 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -8,11 +8,11 @@ process SAMTOOLS_FAIDX { 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: - path fasta + tuple val(meta), path(fasta) output: - path "*.fai" , emit: fai - path "versions.yml", emit: versions + tuple val(meta), path ("*.fai") , emit: fai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index 16c0b334..bae97a39 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -14,11 +14,21 @@ tools: doi: 10.1093/bioinformatics/btp352 licence: ['MIT'] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - fasta: type: file description: FASTA file pattern: "*.{fa,fasta}" output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] - fai: type: file description: FASTA index file diff --git a/tests/modules/samtools/faidx/main.nf b/tests/modules/samtools/faidx/main.nf index bc47c847..bc4dc5e3 100644 --- a/tests/modules/samtools/faidx/main.nf +++ b/tests/modules/samtools/faidx/main.nf @@ -5,7 +5,9 @@ nextflow.enable.dsl = 2 include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' workflow test_samtools_faidx { - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - SAMTOOLS_FAIDX ( fasta ) + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + SAMTOOLS_FAIDX ( input ) } diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index f0224f34..dc2184ee 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,8 +1,10 @@ -- name: samtools faidx test workflow - command: nextflow run ./tests/modules/samtools/faidx -entry test_samtools_faidx -c ./tests/config/nextflow.config -c ./tests/modules/samtools/faidx/nextflow.config +- name: samtools faidx test_samtools_faidx + command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config tags: - samtools - samtools/faidx files: - path: output/samtools/genome.fasta.fai md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/samtools/versions.yml + md5sum: d56671a7c8f8058944d3d536c3058f7f From 98b024c0e46ef0ea994cd2cba408f0043e7e4dcf Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Mon, 6 Dec 2021 12:59:49 +0100 Subject: [PATCH 082/101] Fix syntax for extra containers for ensemblvep and snpeff (#1105) * fix: correct syntax for task.ext.use_cache * Apply suggestions from code review * fix: simplify logic * fix: update to new syntax --- modules/ensemblvep/main.nf | 14 ++++++-------- modules/snpeff/main.nf | 14 ++++++-------- tests/modules/ensemblvep/nextflow.config | 3 +-- tests/modules/snpeff/nextflow.config | 3 +-- 4 files changed, 14 insertions(+), 20 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 3182feb2..9caffb0c 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -2,13 +2,11 @@ process ENSEMBLVEP { label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - if (task.ext.use_cache) { - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + !task.ext.container_tag ? 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" - } else { - container "nfcore/vep:${task.ext.vep_tag}" - } + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' : + "nfcore/vep:${task.ext.container_tag}" }" input: tuple val(meta), path(vcf) @@ -25,7 +23,7 @@ process ENSEMBLVEP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def dir_cache = task.ext.use_cache ? "\${PWD}/${cache}" : "/.vep" + def dir_cache = cache ? 
"\${PWD}/${cache}" : "/.vep" """ mkdir $prefix @@ -39,7 +37,7 @@ process ENSEMBLVEP { --cache_version $cache_version \\ --dir_cache $dir_cache \\ --fork $task.cpus \\ - --format vcf \\ + --vcf \\ --stats_file ${prefix}.summary.html rm -rf $prefix diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index d0ec993e..db9cca72 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -2,13 +2,11 @@ process SNPEFF { label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - if (task.ext.use_cache) { - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + !task.ext.container_tag ? 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" - } else { - container "nfcore/snpeff:${task.ext.snpeff_tag}" - } + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' : + "nfcore/snpeff:${task.ext.container_tag}" }" input: tuple val(meta), path(vcf) @@ -29,14 +27,14 @@ process SNPEFF { avail_mem = task.memory.giga } def prefix = task.ext.prefix ?: "${meta.id}" - def dir_cache = task.ext.use_cache ? "-dataDir \${PWD}/${cache}" : "" + def cache_command = cache ? 
"-dataDir \${PWD}/${cache}" : "" """ snpEff \\ -Xmx${avail_mem}g \\ $db \\ $args \\ -csvStats ${prefix}.csv \\ - $dir_cache \\ + $cache_command \\ $vcf \\ > ${prefix}.ann.vcf diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config index bcca2d06..717fcae9 100644 --- a/tests/modules/ensemblvep/nextflow.config +++ b/tests/modules/ensemblvep/nextflow.config @@ -3,8 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ENSEMBLVEP { - ext.vep_tag = '104.3.WBcel235' - ext.use_cache = false + ext.container_tag = '104.3.WBcel235' } } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config index 589c8cfb..3b094eed 100644 --- a/tests/modules/snpeff/nextflow.config +++ b/tests/modules/snpeff/nextflow.config @@ -3,8 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SNPEFF { - ext.snpeff_tag = '5.0.WBcel235' - ext.use_cache = false + ext.container_tag = '5.0.WBcel235' } } From e22966ce74340cb671576143e5fdbbd71670cffa Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Tue, 7 Dec 2021 10:12:58 +0100 Subject: [PATCH 083/101] feat: emited channel should be gz_tbi and not only tbi (#1118) --- modules/tabix/bgziptabix/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 20b47a9f..e419d153 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -11,7 +11,7 @@ process TABIX_BGZIPTABIX { tuple val(meta), path(input) output: - tuple val(meta), path("*.gz"), path("*.tbi"), emit: tbi + tuple val(meta), path("*.gz"), path("*.tbi"), emit: gz_tbi path "versions.yml" , emit: versions script: From 7006699ff8e4351e4c95d548de959d4222c7862a Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 7 Dec 2021 10:22:32 +0100 Subject: [PATCH 084/101] Update version & prefix (#1120) * Update version & prefix * Fix indentation --- modules/seqkit/split2/main.nf | 25 ++++----- tests/modules/seqkit/split2/test.yml | 78 ++++++++++++++++------------ 2 files changed, 58 insertions(+), 45 deletions(-) diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index fc027793..7e361a06 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -2,29 +2,30 @@ process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::seqkit=0.16.1' : null) + conda (params.enable_conda ? 'bioconda::seqkit=2.1.0' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0' : - 'quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0' }" + 'https://depot.galaxyproject.org/singularity/seqkit:2.1.0--h9ee0642_0' : + 'quay.io/biocontainers/seqkit:2.1.0--h9ee0642_0' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*${prefix}/*.gz"), emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("**/*.gz"), emit: reads + path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if(meta.single_end){ """ seqkit \\ split2 \\ $args \\ --threads $task.cpus \\ - -1 $reads \\ - --out-dir $prefix + $reads \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -37,9 +38,9 @@ process SEQKIT_SPLIT2 { split2 \\ $args \\ --threads $task.cpus \\ - -1 ${reads[0]} \\ - -2 ${reads[1]} \\ - --out-dir $prefix + --read1 ${reads[0]} \\ + --read2 ${reads[1]} \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index 12b02072..00368e22 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -1,83 +1,95 @@ -- name: seqkit split2 single-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: 
output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 + - path: output/seqkit/versions.yml + md5sum: 2d5a709d129be364687cc0b561efa532 -- name: seqkit split2 single-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 + - path: output/seqkit/versions.yml + md5sum: 490d00accd1092a8eca4e83ed809bad3 -- name: seqkit split2 single-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee + - path: output/seqkit/versions.yml + md5sum: 90431cd3d28954f656988230d4481115 -- name: seqkit split2 paired-end length - command: 
nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 6b094b1ba7c439fe44c1bb5e99a02ba4 + md5sum: 160b5fd363ff7cad8af9d914269d6426 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 927097c6ac7522199a9e016333181a8e + md5sum: 18bc5434cf55706394cccb44e6108561 + - path: output/seqkit/versions.yml + md5sum: 9272afc1a126ae997a712edeef317f22 -- name: seqkit split2 paired-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 09d0dd83b5b1b9b95d316eeed79ea5ba + md5sum: c0602b62aae860dd284c0eb0062c24dd - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8796c3f327b1094244bfcdb36d536526 + md5sum: 
5bc7a98b618100b29910eb41c4c9ac0d + - path: output/seqkit/versions.yml + md5sum: af66912ae8abc493f77f70e3bf473144 -- name: seqkit split2 paired-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c ./tests/config/nextflow.config -c ./tests/modules/seqkit/split2/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: f0055c99cd193fd97466b3cde9dd1b8f + md5sum: b51a1bed106e4ec0c9be7d9e224d0616 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8a90df768201785f7a7cd5dbb41e846a + md5sum: 079078a7f86114ae29cda8c00d5a7fc9 - path: output/seqkit/test/test_2.part_003.fastq.gz - md5sum: 890b90083e8e1606bd13ba34149cedd7 + md5sum: 6987941bf8c4a37565e333029ba41ca0 + - path: output/seqkit/versions.yml + md5sum: 193bc5f0c429076f816ab0a529c4c1fc From 6510a7ff4f339625ebb89055bd39b9063797bb63 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Tue, 7 Dec 2021 10:30:35 +0100 Subject: [PATCH 085/101] feat: add meta.id tag (#1116) * feat: add meat.id tag * fix: actually call the right container for singularity --- modules/ensemblvep/main.nf | 9 +++++---- modules/snpeff/main.nf | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 9caffb0c..78f2712c 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,12 +1,13 @@ process ENSEMBLVEP { + tag "$meta.id" label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - !task.ext.container_tag ? + container "${ task.ext.container_tag ? + "nfcore/vep:${task.ext.container_tag}" : + workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' : - "nfcore/vep:${task.ext.container_tag}" }" + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" input: tuple val(meta), path(vcf) diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index db9cca72..9847c513 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,12 +1,13 @@ process SNPEFF { + tag "$meta.id" label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - !task.ext.container_tag ? + container "${ task.ext.container_tag ? + "nfcore/snpeff:${task.ext.container_tag}" : + workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' : - "nfcore/snpeff:${task.ext.container_tag}" }" + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" input: tuple val(meta), path(vcf) From 3b366c7c6aac446c1a4ea7c2016092344633b2ec Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Tue, 7 Dec 2021 11:18:12 +0100 Subject: [PATCH 086/101] greatly simplify syntax (#1121) --- modules/ensemblvep/main.nf | 8 +++----- modules/snpeff/main.nf | 8 +++----- tests/modules/ensemblvep/nextflow.config | 2 +- tests/modules/snpeff/nextflow.config | 2 +- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index 78f2712c..e3d0c286 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -3,11 +3,9 @@ process ENSEMBLVEP { label 'process_medium' conda (params.enable_conda ? "bioconda::ensembl-vep=104.3" : null) - container "${ task.ext.container_tag ? - "nfcore/vep:${task.ext.container_tag}" : - workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : - 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" input: tuple val(meta), path(vcf) diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 9847c513..6248fee3 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -3,11 +3,9 @@ process SNPEFF { label 'process_medium' conda (params.enable_conda ? "bioconda::snpeff=5.0" : null) - container "${ task.ext.container_tag ? 
- "nfcore/snpeff:${task.ext.container_tag}" : - workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : - 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" input: tuple val(meta), path(vcf) diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config index 717fcae9..f13d62e9 100644 --- a/tests/modules/ensemblvep/nextflow.config +++ b/tests/modules/ensemblvep/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: ENSEMBLVEP { - ext.container_tag = '104.3.WBcel235' + container = 'nfcore/vep:104.3.WBcel235' } } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config index 3b094eed..f4042ab9 100644 --- a/tests/modules/snpeff/nextflow.config +++ b/tests/modules/snpeff/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SNPEFF { - ext.container_tag = '5.0.WBcel235' + container = 'nfcore/snpeff:5.0.WBcel235' } } From d473a247d2e0c619b0df877ea19d9a5a98c8e3c8 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Tue, 7 Dec 2021 15:00:43 +0100 Subject: [PATCH 087/101] Replace remaining task.ext.suffix with task.ext.prefix (#1117) * Replace remaining task.ext.suffix with task.ext.prefix --- modules/artic/minion/main.nf | 4 ++-- modules/bakta/main.nf | 6 +++--- modules/bcftools/concat/main.nf | 4 ++-- modules/bcftools/isec/main.nf | 4 ++-- modules/bcftools/merge/main.nf | 4 ++-- modules/bedtools/getfasta/main.nf | 4 ++-- modules/checkm/lineagewf/main.nf | 4 ++-- 
modules/csvtk/concat/main.nf | 4 ++-- modules/damageprofiler/main.nf | 4 ++-- modules/dedup/main.nf | 4 ++-- modules/fargene/main.nf | 4 ++-- modules/gatk4/genomicsdbimport/main.nf | 4 ++-- modules/gffread/main.nf | 4 ++-- modules/leehom/main.nf | 4 ++-- modules/msisensor/msi/main.nf | 4 ++-- modules/nextclade/main.nf | 4 ++-- modules/optitype/main.nf | 6 +++--- modules/plasmidid/main.nf | 4 ++-- modules/prodigal/main.nf | 4 ++-- modules/prokka/main.nf | 4 ++-- modules/qualimap/bamqc/main.nf | 4 ++-- modules/qualimap/rnaseq/main.nf | 4 ++-- modules/quast/main.nf | 4 ++-- modules/rsem/calculateexpression/main.nf | 4 ++-- modules/salmon/quant/main.nf | 4 ++-- modules/samtools/merge/main.nf | 4 ++-- modules/seqkit/split2/main.nf | 1 - modules/seqtk/subseq/main.nf | 4 ++-- modules/tbprofiler/profile/main.nf | 4 ++-- tests/modules/gffread/nextflow.config | 2 +- tests/modules/seqtk/subseq/nextflow.config | 2 +- 31 files changed, 60 insertions(+), 61 deletions(-) diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 86863f95..ce04fcc8 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -32,8 +32,8 @@ process ARTIC_MINION { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') def fast5 = fast5_dir ? "--fast5-directory $fast5_dir" : "" def summary = sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf index 20127e53..2582dac2 100644 --- a/modules/bakta/main.nf +++ b/modules/bakta/main.nf @@ -26,8 +26,8 @@ process BAKTA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" """ @@ -47,7 +47,7 @@ process BAKTA { """ stub: - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + prefix = task.ext.prefix ?: "${meta.id}" """ touch ${prefix}.embl touch ${prefix}.faa diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index dbd9d9dc..cebd2443 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_CONCAT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools concat \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index c4eab09d..08323f28 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_ISEC { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools isec \\ $args \\ diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index 32ad760c..bfb0f162 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -15,8 +15,8 @@ process BCFTOOLS_MERGE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools merge -Oz \\ --output ${prefix}.vcf.gz \\ diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index c4dae429..5a283e94 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -16,8 +16,8 @@ process BEDTOOLS_GETFASTA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${bed.baseName}${task.ext.suffix}" : "${bed.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${bed.baseName}" """ bedtools \\ getfasta \\ diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf index 119ee491..992b165e 100644 --- a/modules/checkm/lineagewf/main.nf +++ b/modules/checkm/lineagewf/main.nf @@ -17,8 +17,8 @@ process CHECKM_LINEAGEWF { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ checkm \\ lineage_wf \\ diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index 745a9ac4..94b1925a 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -17,8 +17,8 @@ process CSVTK_CONCAT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) out_extension = out_format == "tsv" ? 
'tsv' : 'csv' diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index da37909e..23eb9397 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -18,8 +18,8 @@ process DAMAGEPROFILER { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "-r $fasta" : "" def species_list = specieslist ? "-sf $specieslist" : "" """ diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf index 60fc376e..8b4bdc37 100644 --- a/modules/dedup/main.nf +++ b/modules/dedup/main.nf @@ -18,8 +18,8 @@ process DEDUP { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ dedup \\ diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf index ac3f8338..73bdd411 100644 --- a/modules/fargene/main.nf +++ b/modules/fargene/main.nf @@ -32,8 +32,8 @@ process FARGENE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ fargene \\ $args \\ diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index 110dbf4f..e794aa5a 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -20,8 +20,8 @@ process GATK4_GENOMICSDBIMPORT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" // settings for running default create gendb mode inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index d31f76f8..e7893f8b 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -15,8 +15,8 @@ process GFFREAD { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ? "${gff.baseName}${task.ext.suffix}" : "${gff.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${gff.baseName}" """ gffread \\ $gff \\ diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf index d997e68b..b5cb2dcb 100644 --- a/modules/leehom/main.nf +++ b/modules/leehom/main.nf @@ -24,8 +24,8 @@ process LEEHOM { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" if (reads.toString().endsWith('.bam')) { """ diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index 1eb510a1..398b34a6 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -18,8 +18,8 @@ process MSISENSOR_MSI { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ msi \\ diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 317d393d..f60af57b 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -19,8 +19,8 @@ process NEXTCLADE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ nextclade \\ $args \\ diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 24be66a7..d27f7f9f 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -15,9 +15,9 @@ process OPTITYPE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def args2 = task.ext.args2 ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ # Create a config for OptiType on a per sample basis with task.ext.args2 diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 290ae549..7404a678 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -23,8 +23,8 @@ process PLASMIDID { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ plasmidID \\ -d $fasta \\ diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index b09da13c..184b17bb 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -19,8 +19,8 @@ process PRODIGAL { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ prodigal -i "${genome}" \\ $args \\ diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index 8fae6367..551a17b9 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -28,8 +28,8 @@ process PROKKA { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? "--prodigaltf ${prodigal_tf[0]}" : "" """ diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index a47fde7e..973fd6a4 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -17,8 +17,8 @@ process QUALIMAP_BAMQC { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def collect_pairs = meta.single_end ? '' : '--collect-overlap-pairs' def memory = task.memory.toGiga() + "G" diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 459f3da5..d83fcd99 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -16,8 +16,8 @@ process QUALIMAP_RNASEQ { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-pe' def memory = task.memory.toGiga() + "G" diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 43caca3d..e88051b5 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -19,8 +19,8 @@ process QUAST { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ?: 'quast' + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: 'quast' def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? 
"-r $fasta" : '' """ diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index 659082fa..4b2ada47 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -23,8 +23,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 9557fd24..6cae4f72 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -20,8 +20,8 @@ process SALMON_QUANT { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = "--index $index" def input_reads = meta.single_end ? "-r $reads" : "-1 ${reads[0]} -2 ${reads[1]}" diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 8eeb64a2..fcfcf61f 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -17,8 +17,8 @@ process SAMTOOLS_MERGE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def file_type = input_files[0].getExtension() def reference = fasta ? 
"--reference ${fasta}" : "" """ diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 7e361a06..5bed1dae 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -17,7 +17,6 @@ process SEQKIT_SPLIT2 { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - if(meta.single_end){ """ seqkit \\ diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index 1d93b061..abfe4faa 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -16,8 +16,8 @@ process SEQTK_SUBSEQ { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - def prefix = task.ext.suffix ?: '' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { ext = "fq" diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf index 3f6bffc3..87175a39 100644 --- a/modules/tbprofiler/profile/main.nf +++ b/modules/tbprofiler/profile/main.nf @@ -19,8 +19,8 @@ process TBPROFILER_PROFILE { path "versions.yml" , emit: versions script: - def args = task.ext.args ?: '' - prefix = task.ext.suffix ? "${meta.id}${task.ext.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? 
"--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" """ tb-profiler \\ diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config index c020f934..0714a6e8 100644 --- a/tests/modules/gffread/nextflow.config +++ b/tests/modules/gffread/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: GFFREAD { - ext.prefix = { "${meta.id}.out" } + ext.prefix = { "${gff.baseName}.out" } } } diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config index 24f16bad..8a8b9b45 100644 --- a/tests/modules/seqtk/subseq/nextflow.config +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: SEQTK_SUBSEQ { - ext.prefix = { "${meta.id}.filtered" } + ext.prefix = { ".filtered" } } } From 7389963d5cb18f81c10dff128c510e518ee4f0f6 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 7 Dec 2021 15:22:24 +0100 Subject: [PATCH 088/101] Add memory stuff to all gatk4 modules (#1122) * Add memory stuff to all gatj4 modules * Add removed input line back in * revert script section --- modules/gatk4/applybqsr/main.nf | 3 ++- modules/gatk4/baserecalibrator/main.nf | 4 ++-- modules/gatk4/bedtointervallist/main.nf | 8 +++++++- modules/gatk4/calculatecontamination/main.nf | 8 +++++++- modules/gatk4/createsequencedictionary/main.nf | 2 +- modules/gatk4/createsomaticpanelofnormals/main.nf | 8 +++++++- modules/gatk4/estimatelibrarycomplexity/main.nf | 2 +- modules/gatk4/fastqtosam/main.nf | 8 +++++++- modules/gatk4/filtermutectcalls/main.nf | 8 +++++++- modules/gatk4/genomicsdbimport/main.nf | 8 +++++++- modules/gatk4/genotypegvcfs/main.nf | 8 +++++++- modules/gatk4/getpileupsummaries/main.nf | 8 +++++++- modules/gatk4/indexfeaturefile/main.nf | 8 +++++++- modules/gatk4/intervallisttools/main.nf 
| 8 +++++++- modules/gatk4/learnreadorientationmodel/main.nf | 8 +++++++- modules/gatk4/markduplicates/main.nf | 6 +++--- modules/gatk4/mergebamalignment/main.nf | 8 +++++++- modules/gatk4/mergevcfs/main.nf | 8 +++++++- modules/gatk4/mutect2/main.nf | 8 +++++++- modules/gatk4/revertsam/main.nf | 8 +++++++- modules/gatk4/samtofastq/main.nf | 8 +++++++- modules/gatk4/splitncigarreads/main.nf | 8 +++++++- modules/gatk4/variantfiltration/main.nf | 2 +- 23 files changed, 129 insertions(+), 26 deletions(-) diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index bd428d6c..3cc69ddf 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -22,13 +22,14 @@ process GATK4_APPLYBQSR { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" + def avail_mem = 3 if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk ApplyBQSR \\ + gatk --java-options "-Xmx${avail_mem}g" ApplyBQSR \\ -R $fasta \\ -I $input \\ --bqsr-recal-file $bqsr_table \\ diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 9b0bf286..17b37943 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -25,14 +25,14 @@ process GATK4_BASERECALIBRATOR { def prefix = task.ext.prefix ?: "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') - + def avail_mem = 3 if (!task.memory) { log.info '[GATK BaseRecalibrator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
} else { avail_mem = task.memory.giga } """ - gatk BaseRecalibrator \ + gatk --java-options "-Xmx${avail_mem}g" BaseRecalibrator \ -R $fasta \ -I $input \ $sitesCommand \ diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index c4538034..2f6266b9 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -18,8 +18,14 @@ process GATK4_BEDTOINTERVALLIST { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK BedToIntervalList] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk BedToIntervalList \\ + gatk --java-options "-Xmx${avail_mem}g" BedToIntervalList \\ -I $bed \\ -SD $sequence_dict \\ -O ${prefix}.interval_list \\ diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 7c112c3c..8840356a 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -21,8 +21,14 @@ process GATK4_CALCULATECONTAMINATION { def prefix = task.ext.prefix ?: "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CalculateContamination] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk CalculateContamination \\ + gatk --java-options "-Xmx${avail_mem}g" CalculateContamination \\ -I $pileup \\ $matched_command \\ -O ${prefix}.contamination.table \\ diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 8d001856..e8f32106 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -18,7 +18,7 @@ process GATK4_CREATESEQUENCEDICTIONARY { def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { - log.info '[GATK] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' + log.info '[GATK CreateSequenceDictionary] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 2860e82e..ff345f75 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -21,8 +21,14 @@ process GATK4_CREATESOMATICPANELOFNORMALS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CreateSomaticPanelOfNormals] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ CreateSomaticPanelOfNormals \\ -R $fasta \\ -V gendb://$genomicsdb \\ diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index f636dc46..c17dba09 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -29,7 +29,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { avail_mem = task.memory.giga } """ - gatk EstimateLibraryComplexity \ + gatk --java-options "-Xmx${avail_mem}g" EstimateLibraryComplexity \ ${crams} \ -O ${prefix}.metrics \ --REFERENCE_SEQUENCE ${fasta} \ diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index 915eb996..a55ba709 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -18,8 +18,14 @@ process GATK4_FASTQTOSAM { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FastqToSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk FastqToSam \\ + gatk --java-options "-Xmx${avail_mem}g" FastqToSam \\ $read_files \\ -O ${prefix}.bam \\ -SM $prefix \\ diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 02fa804f..6a1d9b3a 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -37,8 +37,14 @@ process GATK4_FILTERMUTECTCALLS { if (contaminationfile) { contamination_options = '--contamination-table ' + contaminationfile.join(' --contamination-table ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FilterMutectCalls] Available memory not known - defaulting to 3GB. 
Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk FilterMutectCalls \\ + gatk --java-options "-Xmx${avail_mem}g" FilterMutectCalls \\ -R $fasta \\ -V $vcf \\ $orientationbias_options \\ diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index e794aa5a..2751173b 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -42,8 +42,14 @@ process GATK4_GENOMICSDBIMPORT { updated_db = wspace.toString() } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenomicsDBImport] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk GenomicsDBImport \\ + gatk --java-options "-Xmx${avail_mem}g" GenomicsDBImport \\ $inputs_command \\ $dir_command \\ $intervals_command \\ diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index f0b35447..1a772860 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -26,8 +26,14 @@ process GATK4_GENOTYPEGVCFS { def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenotypeGVCFs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ GenotypeGVCFs \\ $args \\ $interval_options \\ diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 99be601f..361974e8 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -24,8 +24,14 @@ process GATK4_GETPILEUPSUMMARIES { sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GetPileupSummaries] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk GetPileupSummaries \\ + gatk --java-options "-Xmx${avail_mem}g" GetPileupSummaries \\ -I $bam \\ -V $variants \\ $sitesCommand \\ diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf index d33e030c..cc6c663e 100644 --- a/modules/gatk4/indexfeaturefile/main.nf +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -16,8 +16,14 @@ process GATK4_INDEXFEATUREFILE { script: def args = task.ext.args ?: '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IndexFeatureFile] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ IndexFeatureFile \\ $args \\ -I $feature_file diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 7e1a47f7..b813d844 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -17,11 +17,17 @@ process GATK4_INTERVALLISTTOOLS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IntervalListTools] Available memory not known - defaulting to 3GB. 
Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ mkdir ${prefix}_split - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ IntervalListTools \\ -I ${interval_list} \\ -O ${prefix}_split \\ diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index ac021afa..0c2f09d2 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -19,8 +19,14 @@ process GATK4_LEARNREADORIENTATIONMODEL { def prefix = task.ext.prefix ?: "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK LearnReadOrientationModel] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ LearnReadOrientationModel \\ ${inputs_list.join(' ')} \\ -O ${prefix}.tar.gz \\ diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index a109facc..8bdb2c0a 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -20,14 +20,14 @@ process GATK4_MARKDUPLICATES { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") - def avail_mem = 3 + def avail_mem = 3 if (!task.memory) { - log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + log.info '[GATK MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk MarkDuplicates \\ + gatk --java-options "-Xmx${avail_mem}g" MarkDuplicates \\ $bam_list \\ --METRICS_FILE ${prefix}.metrics \\ --TMP_DIR . 
\\ diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 5e552cb2..a0f54976 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -20,8 +20,14 @@ process GATK4_MERGEBAMALIGNMENT { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeBamAlignment] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk MergeBamAlignment \\ + gatk --java-options "-Xmx${avail_mem}g" MergeBamAlignment \\ ALIGNED=$aligned \\ UNMAPPED=$unmapped \\ R=$fasta \\ diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index cd1840c3..1fcce485 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -26,8 +26,14 @@ process GATK4_MERGEVCFS { input += " I=${vcf}" } def ref = use_ref_dict ? "D=${ref_dict}" : "" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeVcfs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk MergeVcfs \\ + gatk --java-options "-Xmx${avail_mem}g" MergeVcfs \\ $input \\ O=${prefix}.vcf.gz \\ $ref \\ diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 2cf940de..414c7705 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -53,8 +53,14 @@ process GATK4_MUTECT2 { normals_command = '-normal ' + which_norm.join( ' -normal ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK Mutect2] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk Mutect2 \\ + gatk --java-options "-Xmx${avail_mem}g" Mutect2 \\ -R ${fasta} \\ ${inputs_command} \\ ${normals_command} \\ diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 638b7705..0713d7ca 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -17,8 +17,14 @@ process GATK4_REVERTSAM { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK RevertSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk RevertSam \\ + gatk --java-options "-Xmx${avail_mem}g" RevertSam \\ I=$bam \\ O=${prefix}.reverted.bam \\ $args diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index a909f540..0afb7ef3 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -18,8 +18,14 @@ process GATK4_SAMTOFASTQ { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SamToFastq] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk SamToFastq \\ + gatk --java-options "-Xmx${avail_mem}g" SamToFastq \\ I=$bam \\ $output \\ $args diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 65b82a35..6daed954 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -20,8 +20,14 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SplitNCigarReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk SplitNCigarReads \\ + gatk --java-options "-Xmx${avail_mem}g" SplitNCigarReads \\ -R $fasta \\ -I $bam \\ -O ${prefix}.bam \\ diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 00dc2588..efe245cc 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -23,7 +23,7 @@ process GATK4_VARIANTFILTRATION { def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { - log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + log.info '[GATK VariantFiltration] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.toGiga() } From 31d4099f388eb1057d8befc5944305d0de3df951 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Tue, 7 Dec 2021 17:12:35 +0100 Subject: [PATCH 089/101] Add subworkflows for ensemblvep and snpeff (#1124) * greatly simplify syntax * feat: add subworkflows to annotate (+ bgzip/tabix index) with ensemblvep and snpeff * feat: get versions from all tools * add commented infor for new annotation modules --- .../nf-core/annotation_ensemblvep/main.nf | 26 ++++++++++++++++ .../nf-core/annotation_ensemblvep/meta.yml | 29 ++++++++++++++++++ .../nf-core/annotation_snpeff/main.nf | 23 ++++++++++++++ .../nf-core/annotation_snpeff/meta.yml | 29 ++++++++++++++++++ tests/config/pytest_modules.yml | 30 ++++++++++++------- .../nf-core/annotation_ensemblvep/main.nf | 14 +++++++++ .../annotation_ensemblvep/nextflow.config | 14 +++++++++ .../nf-core/annotation_ensemblvep/test.yml | 7 +++++ .../nf-core/annotation_snpeff/main.nf | 14 +++++++++ .../nf-core/annotation_snpeff/nextflow.config | 14 +++++++++ .../nf-core/annotation_snpeff/test.yml | 7 +++++ 11 files changed, 196 insertions(+), 11 deletions(-) create mode 100644 subworkflows/nf-core/annotation_ensemblvep/main.nf create mode 100644 subworkflows/nf-core/annotation_ensemblvep/meta.yml create mode 100644 subworkflows/nf-core/annotation_snpeff/main.nf create mode 100644 subworkflows/nf-core/annotation_snpeff/meta.yml create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/main.nf create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config create mode 100644 tests/subworkflows/nf-core/annotation_ensemblvep/test.yml create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/main.nf create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/nextflow.config create mode 100644 tests/subworkflows/nf-core/annotation_snpeff/test.yml diff --git a/subworkflows/nf-core/annotation_ensemblvep/main.nf b/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..3f3ecc6e --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,26 
@@ +// +// Run VEP to annotate VCF files +// + +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_ENSEMBLVEP { + take: + vcf // channel: [ val(meta), vcf ] + vep_genome // value: which genome + vep_species // value: which species + vep_cache_version // value: which cache version + vep_cache // path: path_to_vep_cache (optionnal) + + main: + ENSEMBLVEP(vcf, vep_genome, vep_species, vep_cache_version, vep_cache) + ANNOTATION_BGZIPTABIX(ENSEMBLVEP.out.vcf) + + ch_versions = ENSEMBLVEP.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = ENSEMBLVEP.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_ensemblvep/meta.yml b/subworkflows/nf-core/annotation_ensemblvep/meta.yml new file mode 100644 index 00000000..e7d92ce9 --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/meta.yml @@ -0,0 +1,29 @@ +name: annotation_ensemblvep +description: | + Perform annotation with ensemblvep and bgzip + tabix index the resulting VCF file +keywords: + - ensemblvep +modules: + - ensemblvep + - tabix/bgziptabix +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/subworkflows/nf-core/annotation_snpeff/main.nf b/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..add5f9c8 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,23 @@ +// +// Run SNPEFF to annotate VCF files +// + +include { SNPEFF } from '../../../modules/snpeff/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_SNPEFF { + take: + vcf // channel: [ val(meta), vcf ] + snpeff_db // value: version of db to use + snpeff_cache // path: path_to_snpeff_cache (optionnal) + + main: + SNPEFF(vcf, snpeff_db, snpeff_cache) + ANNOTATION_BGZIPTABIX(SNPEFF.out.vcf) + ch_versions = SNPEFF.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = SNPEFF.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_snpeff/meta.yml b/subworkflows/nf-core/annotation_snpeff/meta.yml new file mode 100644 index 00000000..164a0ee2 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/meta.yml @@ -0,0 +1,29 @@ +name: annotation_snpeff +description: | + Perform annotation with snpeff and bgzip + tabix index the resulting VCF file +keywords: + - snpeff +modules: + - snpeff + - tabix/bgziptabix +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index bbe89840..e35f8908 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1451,6 +1451,19 @@ yara/mapper: - modules/yara/mapper/** - tests/modules/yara/mapper/** +# subworkflows/align_bowtie2: +# - subworkflows/nf-core/align_bowtie2/** +# - tests/subworkflows/nf-core/align_bowtie2/** +# - *subworkflows_bam_sort_samtools + +# subworkflows/annotation_ensemblvep: &subworkflows_annotation_ensemblvep +# - subworkflows/nf-core/annotation_ensemblvep/** +# - tests/subworkflows/nf-core/annotation_ensemblvep/** + +# subworkflows/annotation_snpeff: &subworkflows_annotation_snpeff +# - subworkflows/nf-core/annotation_snpeff/** +# - tests/subworkflows/nf-core/annotation_snpeff/** + # subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools # - subworkflows/nf-core/bam_stats_samtools/** # - tests/subworkflows/nf-core/bam_stats_samtools/** @@ -1462,17 +1475,6 @@ yara/mapper: # - *samtools_index # - *subworkflows_bam_stats_samtools -# subworkflows/align_bowtie2: -# - subworkflows/nf-core/align_bowtie2/** -# - tests/subworkflows/nf-core/align_bowtie2/** -# - *subworkflows_bam_sort_samtools - -# subworkflows/sra_fastq: -# - subworkflows/nf-core/sra_fastq/** -# - tests/subworkflows/nf-core/sra_fastq/** -# - *sratools_fasterqdump -# - *sratools_prefetch - # subworkflows/gatk_create_som_pon: # - subworkflows/nf-core/gatk_create_som_pon/** # - tests/subworkflows/nf-core/gatk_create_som_pon/** @@ -1495,3 +1497,9 @@ yara/mapper: # - *gatk4_getpileupsummaries # - *gatk4_calculatecontamination # - 
*gatk4_filtermutectcalls + +# subworkflows/sra_fastq: +# - subworkflows/nf-core/sra_fastq/** +# - tests/subworkflows/nf-core/sra_fastq/** +# - *sratools_fasterqdump +# - *sratools_prefetch diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..0f00c62e --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_ENSEMBLVEP } from '../../../../subworkflows/nf-core/annotation_ensemblvep/main' + +workflow annotation_ensemblvep { + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config new file mode 100644 index 00000000..4e8d2990 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + container = 'nfcore/vep:104.3.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_VEP.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml new file mode 100644 index 00000000..706d9d05 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml @@ -0,0 +1,7 @@ +- name: ensemblvep annotation_ensemblvep + command: nextflow run ./tests/subworkflows/nf-core/annotation_ensemblvep -entry annotation_ensemblvep -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config + 
tags: + - annotation_ensemblvep + files: + - path: output/annotation/test_VEP.ann.vcf.gz + - path: output/annotation/test_VEP.ann.vcf.gz.tbi diff --git a/tests/subworkflows/nf-core/annotation_snpeff/main.nf b/tests/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..c80197ee --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_SNPEFF } from '../../../../subworkflows/nf-core/annotation_snpeff/main' + +workflow annotation_snpeff { + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_SNPEFF ( input, "WBcel235.99", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config new file mode 100644 index 00000000..be76cb4a --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + container = 'nfcore/snpeff:5.0.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_snpEff.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/test.yml b/tests/subworkflows/nf-core/annotation_snpeff/test.yml new file mode 100644 index 00000000..943b24e9 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/test.yml @@ -0,0 +1,7 @@ +- name: snpeff annotation_snpeff + command: nextflow run ./tests/subworkflows/nf-core/annotation_snpeff -entry annotation_snpeff -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_snpeff/nextflow.config + tags: + - annotation_snpeff + files: + - path: output/annotation/test_snpEff.ann.vcf.gz + - path: output/annotation/test_snpEff.ann.vcf.gz.tbi From 
1765225042d40cf01eb9469ea95bf6d0f6810937 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Wed, 8 Dec 2021 15:31:27 +0100 Subject: [PATCH 090/101] Add new boolean sort_bam as input to be able to chose between sort and view in bwamem and bwamem2mem (#1125) * feat: view is now in args2 so we can use sort * forgot one split_cpus * feat: update with new logic * fix: add more info * fix: remove split_cpus logic --- modules/bwa/mem/main.nf | 4 ++- modules/bwa/mem/meta.yml | 4 +++ modules/bwamem2/mem/main.nf | 4 ++- modules/bwamem2/mem/meta.yml | 10 ++++-- tests/modules/bwa/mem/main.nf | 37 ++++++++++++++++++++-- tests/modules/bwa/mem/nextflow.config | 4 +++ tests/modules/bwa/mem/test.yml | 36 +++++++++++++++++++++ tests/modules/bwamem2/mem/main.nf | 38 +++++++++++++++++++++-- tests/modules/bwamem2/mem/nextflow.config | 4 +++ tests/modules/bwamem2/mem/test.yml | 36 +++++++++++++++++++++ 10 files changed, 168 insertions(+), 9 deletions(-) diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 801293a8..9695bd2d 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -10,6 +10,7 @@ process BWA_MEM { input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process BWA_MEM { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 
'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` @@ -29,7 +31,7 @@ process BWA_MEM { -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 61eaddef..c7c28f19 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -32,6 +32,10 @@ input: type: file description: BWA genome index files pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 81b4b8ab..6d4d8028 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -10,6 +10,7 @@ process BWAMEM2_MEM { input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process BWAMEM2_MEM { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 
'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` @@ -30,7 +32,7 @@ process BWAMEM2_MEM { -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 -@ $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 58a35e08..71e83759 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -11,9 +11,9 @@ keywords: tools: - bwa: description: | - BWA is a software package for mapping DNA sequences against + BWA-mem2 is a software package for mapping DNA sequences against a large reference genome, such as the human genome. - homepage: http://bio-bwa.sourceforge.net/ + homepage: https://github.com/bwa-mem2/bwa-mem2 documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 licence: ['MIT'] @@ -31,7 +31,11 @@ input: - index: type: file description: BWA genome index files - pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + pattern: "Directory containing BWA index *.{0132,amb,ann,bwt.2bit.64,pac}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/tests/modules/bwa/mem/main.nf b/tests/modules/bwa/mem/main.nf index 117cbb4d..c9c57197 100644 --- a/tests/modules/bwa/mem/main.nf +++ b/tests/modules/bwa/mem/main.nf @@ -18,7 +18,23 @@ workflow test_bwa_mem_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with single-end data and sort +// +workflow test_bwa_mem_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + 
fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } // @@ -35,5 +51,22 @@ workflow test_bwa_mem_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwa_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } diff --git a/tests/modules/bwa/mem/nextflow.config b/tests/modules/bwa/mem/nextflow.config index 8730f1c4..d15f6939 100644 --- a/tests/modules/bwa/mem/nextflow.config +++ b/tests/modules/bwa/mem/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: BWA_MEM { + ext.args2 = { sort_bam ? 
"" : "-bh" } + } + } diff --git a/tests/modules/bwa/mem/test.yml b/tests/modules/bwa/mem/test.yml index 93535043..8fe2ee6b 100644 --- a/tests/modules/bwa/mem/test.yml +++ b/tests/modules/bwa/mem/test.yml @@ -16,6 +16,24 @@ - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 +- name: bwa mem single-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + - name: bwa mem paired-end command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: @@ -33,3 +51,21 @@ md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + +- name: bwa mem paired-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git 
a/tests/modules/bwamem2/mem/main.nf b/tests/modules/bwamem2/mem/main.nf index 2ab557e6..b4293dbe 100644 --- a/tests/modules/bwamem2/mem/main.nf +++ b/tests/modules/bwamem2/mem/main.nf @@ -18,9 +18,26 @@ workflow test_bwamem2_mem_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) } +// +// Test with single-end data and sort +// +workflow test_bwamem2_mem_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) +} + + // // Test with paired-end data // @@ -35,5 +52,22 @@ workflow test_bwamem2_mem_paired_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwamem2_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) } diff --git a/tests/modules/bwamem2/mem/nextflow.config b/tests/modules/bwamem2/mem/nextflow.config index 8730f1c4..b5181865 100644 --- a/tests/modules/bwamem2/mem/nextflow.config +++ b/tests/modules/bwamem2/mem/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: BWAMEM2_MEM { + ext.args2 = { sort_bam ? "" : "-bh" } + } + } diff --git a/tests/modules/bwamem2/mem/test.yml b/tests/modules/bwamem2/mem/test.yml index c1724bc0..bf445ebe 100644 --- a/tests/modules/bwamem2/mem/test.yml +++ b/tests/modules/bwamem2/mem/test.yml @@ -16,6 +16,24 @@ - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 +- name: bwamem2 mem single-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - name: bwamem2 mem paired-end command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: @@ -33,3 +51,21 @@ md5sum: d097a1b82dee375d41a1ea69895a9216 - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 + +- name: bwamem2 mem paired-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: 
./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 From ca3ae9ff4f39eb3553ad6b3d2e1fa511131685de Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Wed, 8 Dec 2021 23:19:37 +0100 Subject: [PATCH 091/101] feat: update dragmap to follow new bwa/mem + bwamem2/mem logic (#1146) --- modules/dragmap/align/main.nf | 6 ++-- tests/modules/dragmap/align/main.nf | 33 +++++++++++++++++++-- tests/modules/dragmap/align/nextflow.config | 8 +++-- tests/modules/dragmap/align/test.yml | 18 +++++++++++ 4 files changed, 58 insertions(+), 7 deletions(-) diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf index f6d6877e..8a6f082a 100644 --- a/modules/dragmap/align/main.nf +++ b/modules/dragmap/align/main.nf @@ -10,6 +10,7 @@ process DRAGMAP_ALIGN { input: tuple val(meta), path(reads) path hashmap + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam @@ -20,6 +21,7 @@ process DRAGMAP_ALIGN { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def samtools_command = sort_bam ? 
'sort' : 'view' if (meta.single_end) { """ dragen-os \\ @@ -28,7 +30,7 @@ process DRAGMAP_ALIGN { --num-threads $task.cpus \\ $args \\ 2> ${prefix}.dragmap.log \\ - | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -46,7 +48,7 @@ process DRAGMAP_ALIGN { --num-threads $task.cpus \\ $args \\ 2> ${prefix}.dragmap.log \\ - | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/dragmap/align/main.nf b/tests/modules/dragmap/align/main.nf index 92e8c265..4376602c 100644 --- a/tests/modules/dragmap/align/main.nf +++ b/tests/modules/dragmap/align/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' -include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' +include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' workflow test_dragmap_align_single_end { input = [ @@ -15,7 +15,20 @@ workflow test_dragmap_align_single_end { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) DRAGMAP_HASHTABLE ( fasta ) - DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) } workflow test_dragmap_align_paired_end { @@ -29,5 +42,19 @@ workflow test_dragmap_align_paired_end { fasta = 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) DRAGMAP_HASHTABLE ( fasta ) - DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) } diff --git a/tests/modules/dragmap/align/nextflow.config b/tests/modules/dragmap/align/nextflow.config index 50f50a7a..b968c357 100644 --- a/tests/modules/dragmap/align/nextflow.config +++ b/tests/modules/dragmap/align/nextflow.config @@ -1,5 +1,9 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} \ No newline at end of file + + withName: DRAGMAP_ALIGN { + ext.args2 = { sort_bam ? 
"" : "-bh" } + } + +} diff --git a/tests/modules/dragmap/align/test.yml b/tests/modules/dragmap/align/test.yml index 75c5ea96..b0196e55 100644 --- a/tests/modules/dragmap/align/test.yml +++ b/tests/modules/dragmap/align/test.yml @@ -7,6 +7,15 @@ - path: output/dragmap/test.bam - path: output/dragmap/test.dragmap.log +- name: dragmap align single-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + - name: dragmap align paired-end command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config tags: @@ -15,3 +24,12 @@ files: - path: output/dragmap/test.bam - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log From a9dd46f010e3974f00616606e209db8d81587c64 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Wed, 8 Dec 2021 23:43:36 +0100 Subject: [PATCH 092/101] add gatk4/gatherbqsr (#1130) * nf-core modules create * add module files * indent * remove templte code * manually revert pytest changes from tools * manually revert pytest changes from tools * add include statement back in Co-authored-by: Maxime U. 
Garcia --- modules/gatk4/gatherbqsrreports/main.nf | 41 ++++++++++++++++++ modules/gatk4/gatherbqsrreports/meta.yml | 43 +++++++++++++++++++ tests/config/pytest_modules.yml | 28 ++++++------ tests/modules/gatk4/gatherbqsrreports/main.nf | 27 ++++++++++++ .../gatk4/gatherbqsrreports/nextflow.config | 5 +++ .../modules/gatk4/gatherbqsrreports/test.yml | 21 +++++++++ 6 files changed, 153 insertions(+), 12 deletions(-) create mode 100644 modules/gatk4/gatherbqsrreports/main.nf create mode 100644 modules/gatk4/gatherbqsrreports/meta.yml create mode 100644 tests/modules/gatk4/gatherbqsrreports/main.nf create mode 100644 tests/modules/gatk4/gatherbqsrreports/nextflow.config create mode 100644 tests/modules/gatk4/gatherbqsrreports/test.yml diff --git a/modules/gatk4/gatherbqsrreports/main.nf b/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..1567f9aa --- /dev/null +++ b/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,41 @@ +process GATK4_GATHERBQSRREPORTS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_1': + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_1' }" + + input: + tuple val(meta), path(recal_table) + + output: + tuple val(meta), path("*.table"), emit: table + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def input = recal_table.collect{"-I ${it}"}.join(' ') + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GatherBQSRReports] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" \\ + GatherBQSRReports \ + ${input} \ + --tmp-dir . 
\ + $args \ + --output ${prefix}.table + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/gatherbqsrreports/meta.yml b/modules/gatk4/gatherbqsrreports/meta.yml new file mode 100644 index 00000000..f71afd69 --- /dev/null +++ b/modules/gatk4/gatherbqsrreports/meta.yml @@ -0,0 +1,43 @@ +name: gatk4_gatherbqsrreports +description: write your description here +keywords: + - gatk4 + - gatk4_gatherbqsrreports + - base_recalibration +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - recal_table: + type: file + description: File(s) containing BQSR table(s) + pattern: "*.table" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - recal_table: + type: file + description: File containing joined BQSR table + pattern: "*.table" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index e35f8908..6dbfc1fd 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -314,26 +314,26 @@ cnvkit/batch: - modules/cnvkit/batch/** - tests/modules/cnvkit/batch/** -cooler/digest: - - modules/cooler/digest/** - - tests/modules/cooler/digest/** - cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** +cooler/digest: + - modules/cooler/digest/** + - tests/modules/cooler/digest/** + cooler/dump: - modules/cooler/dump/** - tests/modules/cooler/dump/** -cooler/zoomify: - - modules/cooler/zoomify/** - - tests/software/cooler/zoomify/** - cooler/merge: - modules/cooler/merge/** - tests/modules/cooler/merge/** +cooler/zoomify: + - modules/cooler/zoomify/** + - tests/software/cooler/zoomify/** + csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** @@ -538,6 +538,10 @@ gatk4/filtermutectcalls: #&gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** +gatk4/gatherbqsrreports: + - modules/gatk4/gatherbqsrreports/** + - tests/modules/gatk4/gatherbqsrreports/** + gatk4/genomicsdbimport: #&gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** @@ -1035,6 +1039,10 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +peddy: + - modules/peddy/** + - tests/modules/peddy/** + phyloflash: - modules/phyloflash/** - tests/modules/phyloflash/** @@ -1043,10 +1051,6 @@ picard/collecthsmetrics: - modules/picard/collecthsmetrics/** - tests/modules/picard/collecthsmetrics/** -peddy: - - modules/peddy/** - - tests/modules/peddy/** - picard/collectmultiplemetrics: - 
modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** diff --git a/tests/modules/gatk4/gatherbqsrreports/main.nf b/tests/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..2693a06a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GATHERBQSRREPORTS } from '../../../../modules/gatk4/gatherbqsrreports/main.nf' + +workflow test_gatk4_gatherbqsrreports { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true) + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} + +workflow test_gatk4_gatherbqsrreports_multiple { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_baserecalibrator_table'], checkIfExists: true) + ] + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} diff --git a/tests/modules/gatk4/gatherbqsrreports/nextflow.config b/tests/modules/gatk4/gatherbqsrreports/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/gatk4/gatherbqsrreports/test.yml b/tests/modules/gatk4/gatherbqsrreports/test.yml new file mode 100644 index 00000000..76c90120 --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/test.yml @@ -0,0 +1,21 @@ +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: 
+ - path: output/gatk4/test.table + md5sum: 9603b69fdc3b5090de2e0dd78bfcc4bf + - path: output/gatk4/versions.yml + md5sum: 50238fd0f3b6f4efb2b5335b6324f905 + +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports_multiple + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports_multiple -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: + - path: output/gatk4/test.table + md5sum: 0c1257eececf95db8ca378272d0f21f9 + - path: output/gatk4/versions.yml + md5sum: c6ce163062dd3609848fc5bc10660427 From 37c5cb495d40118b13a0ecda648da9512ee5a9fc Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Thu, 9 Dec 2021 11:04:53 +0100 Subject: [PATCH 093/101] feat: add original input as optional output channel (#1147) --- modules/samtools/index/main.nf | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index b033e225..74ae15df 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,10 +11,13 @@ process SAMTOOLS_INDEX { tuple val(meta), path(input) output: - tuple val(meta), path("*.bai") , optional:true, emit: bai - tuple val(meta), path("*.crai"), optional:true, emit: crai - tuple val(meta), path("*.csi") , optional:true, emit: csi - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam" , includeInputs:true), path("*.bai") , optional:true, emit: bam_bai + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.bam" , includeInputs:true), path("*.csi") , optional:true, emit: bam_csi + tuple val(meta), path("*.csi") , optional:true, emit: csi + tuple val(meta), path("*.cram", includeInputs:true), path("*.crai"), optional:true, emit: cram_crai + tuple val(meta), path("*.crai") , optional:true, emit: crai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' From a68c563e54d5f3720fc57ef6e34ff08c4b3ec398 Mon Sep 
17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Thu, 9 Dec 2021 11:16:40 +0100 Subject: [PATCH 094/101] Added UMI sub-workflow (#1098) * added code for subworkflow fgbio call umi consensus * ironing out a few typos etc * fixing last things * fixed md5sum - lets see if it changes * removing file accidentally deleted * tidy indents * added bwamem2 alternative * fixed entry for both tests * changed name second test workflow entry * fixed workflow entry names * fixed md5sum for file generated with bwamem2 * added syntax new DSL2 * added new config location in test command line * added new config location in test command line * use of prefix instead of suffix because modules have been changed in this way * explicit alias to bwa mem1 to avoid confusion * removed param that should be an ext optional argument in fgbio groupreadsbyumi * missing colon in config * missing colon in module config too * order list alphabetically Co-authored-by: Maxime U. Garcia * remove params from body Co-authored-by: Maxime U. Garcia * improving readability of input structure Co-authored-by: Mahesh Binzer-Panchal * reverting to mandatory input * fixed tests and workflow take values * remove param Co-authored-by: Maxime U. Garcia * simplify tests params Co-authored-by: Maxime U. Garcia * formatting inputs for readability * factoring in changes to bwamem2_mem and bwa_mem sort/view inputs * updating test md5sum for grouped file following code update in bwamem Co-authored-by: Maxime U. Garcia Co-authored-by: Maxime U. 
Garcia Co-authored-by: Mahesh Binzer-Panchal --- .../fgbio_create_umi_consensus/main.nf | 86 +++++++++++++++++++ .../fgbio_create_umi_consensus/meta.yml | 67 +++++++++++++++ tests/modules/fgbio/groupreadsbyumi/main.nf | 3 +- .../fgbio_create_umi_consensus/main.nf | 33 +++++++ .../nextflow.config | 31 +++++++ .../fgbio_create_umi_consensus/test.yml | 22 +++++ 6 files changed, 240 insertions(+), 2 deletions(-) create mode 100644 subworkflows/nf-core/fgbio_create_umi_consensus/main.nf create mode 100644 subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config create mode 100644 tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..042d0bbd --- /dev/null +++ b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,86 @@ +// +// Runs FGBIO tools to remove UMI tags from FASTQ reads +// Convert them to unmapped BAM file, map them to the reference genome, +// use the mapped information to group UMIs and generate consensus reads +// + + +include { BWAMEM2_INDEX } from '../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../modules/bwamem2/mem/main' +include { BWA_INDEX as BWAMEM1_INDEX } from '../../../modules/bwa/index/main.nf' +include { BWA_MEM as BWAMEM1_MEM } from '../../../modules/bwa/mem/main' +include { FGBIO_CALLMOLECULARCONSENSUSREADS as CALLUMICONSENSUS } from '../../../modules/fgbio/callmolecularconsensusreads/main.nf' +include { FGBIO_FASTQTOBAM as FASTQTOBAM } from '../../../modules/fgbio/fastqtobam/main' +include { FGBIO_GROUPREADSBYUMI as GROUPREADSBYUMI } from '../../../modules/fgbio/groupreadsbyumi/main' +include { SAMBLASTER } from '../../../modules/samblaster/main' +include { 
SAMTOOLS_BAM2FQ as BAM2FASTQ } from '../../../modules/samtools/bam2fq/main.nf' + + +workflow CREATE_UMI_CONSENSUS { + take: + reads // channel: [mandatory] [ val(meta), [ reads ] ] + fasta // channel: [mandatory] /path/to/reference/fasta + read_structure // string: [mandatory] "read_structure" + groupreadsbyumi_strategy // string: [mandatory] grouping strategy - default: "Adjacency" + aligner // string: [mandatory] "bwa-mem" or "bwa-mem2" + + main: + ch_versions = Channel.empty() + + // using information in val(read_structure) FASTQ reads are converted into + // a tagged unmapped BAM file (uBAM) + FASTQTOBAM ( reads, read_structure ) + ch_versions = ch_versions.mix(FASTQTOBAM.out.version) + + // in order to map uBAM using BWA MEM, we need to convert uBAM to FASTQ + // but keep the appropriate UMI tags in the FASTQ comment field and produce + // an interleaved FASQT file (hence, split = false) + split = false + BAM2FASTQ ( FASTQTOBAM.out.umibam, split ) + ch_versions = ch_versions.mix(BAM2FASTQ.out.versions) + + // the user can choose here to use either bwa-mem (default) or bwa-mem2 + aligned_bam = Channel.empty() + + if (aligner == "bwa-mem") { + // reference is indexed + BWAMEM1_INDEX ( fasta ) + ch_versions = ch_versions.mix(BWAMEM1_INDEX.out.versions) + + // appropriately tagged interleaved FASTQ reads are mapped to the reference + BWAMEM1_MEM ( BAM2FASTQ.out.reads, BWAMEM1_INDEX.out.index, false ) + ch_versions = ch_versions.mix(BWAMEM1_MEM.out.versions) + aligned_bam = BWAMEM1_MEM.out.bam + } else { + // reference is indexed + BWAMEM2_INDEX ( fasta ) + ch_versions = ch_versions.mix(BWAMEM2_INDEX.out.versions) + + // appropriately tagged interleaved FASTQ reads are mapped to the reference + BWAMEM2_MEM ( BAM2FASTQ.out.reads, BWAMEM2_INDEX.out.index, false ) + ch_versions = ch_versions.mix(BWAMEM2_MEM.out.versions) + aligned_bam = BWAMEM2_MEM.out.bam + } + + // samblaster is used in order to tag mates information in the BAM file + // this is used in order to 
group reads by UMI + SAMBLASTER ( aligned_bam ) + ch_versions = ch_versions.mix(SAMBLASTER.out.versions) + + // appropriately tagged reads are now grouped by UMI information + GROUPREADSBYUMI ( SAMBLASTER.out.bam, groupreadsbyumi_strategy ) + ch_versions = ch_versions.mix(GROUPREADSBYUMI.out.versions) + + // using the above created groups, a consensus across reads in the same grou + // can be called + // this will emit a consensus BAM file + CALLUMICONSENSUS ( GROUPREADSBYUMI.out.bam ) + ch_versions = ch_versions.mix(CALLUMICONSENSUS.out.versions) + + emit: + ubam = FASTQTOBAM.out.umibam // channel: [ val(meta), [ bam ] ] + groupbam = GROUPREADSBYUMI.out.bam // channel: [ val(meta), [ bam ] ] + consensusbam = CALLUMICONSENSUS.out.bam // channel: [ val(meta), [ bam ] ] + versions = ch_versions // channel: [ versions.yml ] +} + diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml new file mode 100644 index 00000000..2cb61206 --- /dev/null +++ b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml @@ -0,0 +1,67 @@ +name: fgbio_create_umi_consensus +description: | + This workflow uses the suite FGBIO to identify and remove UMI tags from FASTQ reads + convert them to unmapped BAM file, map them to the reference genome, + and finally use the mapped information to group UMIs and generate consensus reads in each group +keywords: + - fgbio + - umi + - samblaster + - samtools + - bwa +modules: + - bwa/index + - bwa/mem + - fgbio/fastqtobam + - fgbio/groupreadsbyumi + - fgbio/callmolecularconsensusreads + - samblaster + - samtools/bam2fq +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - reads: + type: list + description: list umi-tagged reads + pattern: "[ *.{fastq.gz/fq.gz} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - read_structure: + type: string + description: | + A read structure should always be provided for each of the fastq files. + If single end, the string will contain only one structure (i.e. "2M11S+T"), if paired-end the string + will contain two structures separated by a blank space (i.e. "2M11S+T 2M11S+T"). + If the read does not contain any UMI, the structure will be +T (i.e. only template of any length). + https://github.com/fulcrumgenomics/fgbio/wiki/Read-Structures + - groupreadsbyumi_strategy: + type: string + description: | + Reguired argument: defines the UMI assignment strategy. + Must be chosen among: Identity, Edit, Adjacency, Paired. +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - ubam: + type: file + description: unmapped bam file + pattern: '*.bam' + - groupbam: + type: file + description: mapped bam file, where reads are grouped by UMI tag + pattern: '*.bam' + - consensusbam: + type: file + description: | + mapped bam file, where reads are created as consensus of those + belonging to the same UMI group + pattern: '*.bam' +authors: + - '@lescai' diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf index 1d5fb474..b9bb350a 100644 --- a/tests/modules/fgbio/groupreadsbyumi/main.nf +++ b/tests/modules/fgbio/groupreadsbyumi/main.nf @@ -10,7 +10,6 @@ workflow test_fgbio_groupreadsbyumi { [ id:'test', single_end:false ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ] - strategy = "Adjacency" - FGBIO_GROUPREADSBYUMI ( input, strategy ) + FGBIO_GROUPREADSBYUMI ( input, 'Adjacency' ) } diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf 
b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..6b02bbc8 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CREATE_UMI_CONSENSUS } from '../../../../subworkflows/nf-core/fgbio_create_umi_consensus/main' + +workflow test_fgbio_create_umi_consensus_mem1 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem" ) +} + +workflow test_fgbio_create_umi_consensus_mem2 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem2" ) +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config new file mode 100644 index 00000000..a55a4213 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: BWA_MEM { + ext.args = '-p -C -M' + } + + withName: BWAMEM2_MEM { + 
ext.args = '-p -C -M' + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1 -S Coordinate' + ext.prefix = { "${meta.id}_umiconsensus" } + } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: SAMBLASTER { + ext.args = '-M --addMateTags' + ext.prefix = { "${meta.id}_processed" } + } + +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml new file mode 100644 index 00000000..2db70d3f --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml @@ -0,0 +1,22 @@ +- name: fgbio_create_umi_consensus_bwamem1 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem1 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: ./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: 44f31da850d5a8100b43b629426f2e17 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 +- name: fgbio_create_umi_consensus_bwamem2 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem2 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus_bwamem2 + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: ./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: c69333155038b9a968fd096627d4dfb0 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 From 1f3f2b18bb11be73b90d3a19e4f764aa88612f91 Mon Sep 17 00:00:00 2001 From: Anan Ibrahim 
<81744003+Darcy220606@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:17:50 +0100 Subject: [PATCH 095/101] Add new module macrel/contigs (#1109) * Add new module macrel/contigs * removed trailing whitespace * removed whitespace * linting cleanup * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * Updated the test.yml It didnt upload the updated version earlier for some reason :( * Update test.yml * Update test.yml * Update test.yml as generated by pytest * Update test.yml * updated the version issue * Update tests/modules/macrel/contigs/test.yml * Update modules/macrel/contigs/main.nf * Update modules/macrel/contigs/main.nf * Update pytest_modules.yml * Update pytest_modules.yml * Update modules/macrel/contigs/main.nf Co-authored-by: James A. Fellows Yates * Zipped all fasta outputs * Update main.nf * Update test.yml * Update test.yml * Update main.nf * Update main.nf (gzip -n) * Update test.yml * Update main.nf * Update main.nf * Update test.yml * Update tests/modules/macrel/contigs/test.yml * Update modules/macrel/contigs/main.nf * Apply suggestions from code review Co-authored-by: James A. Fellows Yates Co-authored-by: darcy220606 Co-authored-by: James A. 
Fellows Yates --- modules/macrel/contigs/main.nf | 40 +++++++++++++ modules/macrel/contigs/meta.yml | 61 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/macrel/contigs/main.nf | 15 +++++ tests/modules/macrel/contigs/nextflow.config | 5 ++ tests/modules/macrel/contigs/test.yml | 16 +++++ 6 files changed, 141 insertions(+) create mode 100644 modules/macrel/contigs/main.nf create mode 100644 modules/macrel/contigs/meta.yml create mode 100644 tests/modules/macrel/contigs/main.nf create mode 100644 tests/modules/macrel/contigs/nextflow.config create mode 100644 tests/modules/macrel/contigs/test.yml diff --git a/modules/macrel/contigs/main.nf b/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..558ef6e8 --- /dev/null +++ b/modules/macrel/contigs/main.nf @@ -0,0 +1,40 @@ +process MACREL_CONTIGS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::macrel=1.1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/macrel:1.1.0--py36hc5360cc_0': + 'quay.io/biocontainers/macrel:1.1.0--py36hc5360cc_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*/*.smorfs.faa.gz") , emit: smorfs + tuple val(meta), path("*/*.all_orfs.faa.gz") , emit: all_orfs + tuple val(meta), path("*/*.prediction.gz") , emit: amp_prediction + tuple val(meta), path("*/*.md") , emit: readme_file + tuple val(meta), path("*/*_log.txt") , emit: log_file + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + macrel contigs \\ + $args \\ + --fasta $fasta \\ + --output ${prefix}/ \\ + --tag ${prefix} \\ + --log-file ${prefix}/${prefix}_log.txt \\ + --threads $task.cpus + + gzip --no-name ${prefix}/*.faa + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + macrel: \$(echo \$(macrel --version | sed 's/macrel //g')) + END_VERSIONS + """ +} diff --git a/modules/macrel/contigs/meta.yml b/modules/macrel/contigs/meta.yml new file mode 100644 index 00000000..e0b2fabd --- /dev/null +++ b/modules/macrel/contigs/meta.yml @@ -0,0 +1,61 @@ +name: macrel_contigs +description: A tool that mines antimicrobial peptides (AMPs) from (meta)genomes by predicting peptides from genomes (provided as contigs) and outputs all the predicted anti-microbial peptides found. +keywords: + - AMP + - antimicrobial peptides + - genome mining + - metagenomes + - peptide prediction +tools: + - macrel: + description: A pipeline for AMP (antimicrobial peptide) prediction + homepage: https://macrel.readthedocs.io/en/latest/ + documentation: https://macrel.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/BigDataBiology/macrel + doi: "10.7717/peerj.10555" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: A fasta file with nucleotide sequences. + pattern: "*.{fasta,fa,fna,fasta.gz,fa.gz,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - amp_prediction: + type: file + description: A zipped file, with all predicted amps in a table format. + pattern: "*.prediction.gz" + - smorfs: + type: file + description: A zipped fasta file containing aminoacid sequences showing the general gene prediction information in the contigs. + pattern: "*.smorfs.faa.gz" + - all_orfs: + type: file + description: A zipped fasta file containing amino acid sequences showing the general gene prediction information in the contigs. + pattern: "*.all_orfs.faa.gz" + - readme_file: + type: file + description: A readme file containing tool specific information (e.g. citations, details about the output, etc.). + pattern: "*.md" + - log_file: + type: file + description: A log file containing the information pertaining to the run. 
+ pattern: "*_log.txt" + +authors: + - "@darcy220606" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6dbfc1fd..7601671b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -842,6 +842,10 @@ lofreq/indelqual: - modules/lofreq/indelqual/** - tests/modules/lofreq/indelqual/** +macrel/contigs: + - modules/macrel/contigs/** + - tests/modules/macrel/contigs/** + macs2/callpeak: - modules/macs2/callpeak/** - tests/modules/macs2/callpeak/** diff --git a/tests/modules/macrel/contigs/main.nf b/tests/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..a613dcc4 --- /dev/null +++ b/tests/modules/macrel/contigs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MACREL_CONTIGS } from '../../../../modules/macrel/contigs/main.nf' + +workflow test_macrel_contigs { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) + ] + + MACREL_CONTIGS ( input ) +} diff --git a/tests/modules/macrel/contigs/nextflow.config b/tests/modules/macrel/contigs/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/macrel/contigs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/macrel/contigs/test.yml b/tests/modules/macrel/contigs/test.yml new file mode 100644 index 00000000..af272605 --- /dev/null +++ b/tests/modules/macrel/contigs/test.yml @@ -0,0 +1,16 @@ +- name: macrel contigs test_macrel_contigs + command: nextflow run ./tests/modules/macrel/contigs -entry test_macrel_contigs -c ./tests/config/nextflow.config -c ./tests/modules/macrel/contigs/nextflow.config + tags: + - macrel/contigs + - macrel + files: + - path: output/macrel/test/README.md + md5sum: 
fa3706dfc95d0538a52c4d0d824be5fb + - path: output/macrel/test/test.all_orfs.faa.gz + - path: output/macrel/test/test.prediction.gz + - path: output/macrel/test/test.smorfs.faa.gz + md5sum: 79704c6120c2f794518301af6f9b963d + - path: output/macrel/test/test_log.txt + md5sum: 6fdba143dce759597eb9f80e5d968729 + - path: output/macrel/versions.yml + md5sum: be8bf0d0647751c635c3736655f29f85 From be8528998134132c6f4823f627bbb0aec6a51747 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 9 Dec 2021 15:00:32 +0100 Subject: [PATCH 096/101] Update metabat2 output channels and compression (#1111) * feat: each output type has dedicated channel replace bgzip with gzip can only zip one at a time * Add condition moving of unbinned files * fix: solution for moving sometimes non-existant files * fix: update meta.yml to add the new channels * fix: remove most of the checksums due to variability * fix: tweaking of output * Update modules/metabat2/metabat2/main.nf Co-authored-by: Maxime Borry * Fix find commands * Fix find commands Co-authored-by: Maxime Borry --- modules/metabat2/metabat2/main.nf | 15 ++++++++++----- modules/metabat2/metabat2/meta.yml | 13 +++++++++++++ tests/modules/metabat2/metabat2/test.yml | 15 ++++++++------- 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf index 2d01fdf6..a8af0ae9 100644 --- a/modules/metabat2/metabat2/main.nf +++ b/modules/metabat2/metabat2/main.nf @@ -11,9 +11,12 @@ process METABAT2_METABAT2 { tuple val(meta), path(fasta), path(depth) output: - tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta - tuple val(meta), path("*.tsv.gz"), optional:true , emit: membership - path "versions.yml" , emit: versions + tuple val(meta), path("*.tooShort.fa.gz") , optional:true , emit: tooshort + tuple val(meta), path("*.lowDepth.fa.gz") , optional:true , emit: lowdepth + tuple val(meta), path("*.unbinned.fa.gz") , optional:true , emit: unbinned 
+ tuple val(meta), path("*.tsv.gz") , optional:true , emit: membership + tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' @@ -33,8 +36,10 @@ process METABAT2_METABAT2 { mv metabat2/${prefix} ${prefix}.tsv mv metabat2 bins - bgzip --threads $task.cpus ${prefix}.tsv - bgzip --threads $task.cpus bins/*.fa + + gzip ${prefix}.tsv + find ./bins/ -name "*.fa" -type f | xargs -t -n 1 bgzip -@ ${task.cpus} + find ./bins/ -name "*[lowDepth,tooShort,unbinned].fa.gz" -type f -exec mv {} . \\; cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/metabat2/metabat2/meta.yml b/modules/metabat2/metabat2/meta.yml index a7f3a7ff..0ec07b02 100644 --- a/modules/metabat2/metabat2/meta.yml +++ b/modules/metabat2/metabat2/meta.yml @@ -46,6 +46,18 @@ output: type: file description: Bins created from assembled contigs in fasta file pattern: "*.fa.gz" + - tooshort: + type: file + description: Contigs that did not pass length filtering + pattern: "*.tooShort.fa.gz" + - lowdepth: + type: file + description: Contigs that did not have sufficient depth for binning + pattern: "*.lowDepth.fa.gz" + - unbinned: + type: file + description: Contigs that pass length and depth filtering but could not be binned + pattern: "*.unbinned.fa.gz" - membership: type: file description: cluster memberships as a matrix format. 
@@ -54,3 +66,4 @@ output: authors: - "@maxibor" + - "@jfy133" diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml index 1a8660a7..9389295e 100644 --- a/tests/modules/metabat2/metabat2/test.yml +++ b/tests/modules/metabat2/metabat2/test.yml @@ -1,23 +1,24 @@ - name: metabat2 metabat2 test_metabat2_no_depth - command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c tests/config/nextflow.config tags: - - metabat2/metabat2 - metabat2 + - metabat2/metabat2 files: - path: output/metabat2/bins/test.1.fa.gz md5sum: 0e9bce5b5a0033fd4411a21dec881170 - path: output/metabat2/test.tsv.gz - md5sum: ea77e8c4426d2337419905b57f1ec335 + - path: output/metabat2/versions.yml + md5sum: 5742a71af36c3a748fd5726d76924ba8 - name: metabat2 metabat2 test_metabat2_depth - command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + command: nextflow run tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c tests/config/nextflow.config tags: - - metabat2/metabat2 - metabat2 + - metabat2/metabat2 files: - path: output/metabat2/bins/test.1.fa.gz md5sum: 0e9bce5b5a0033fd4411a21dec881170 - path: output/metabat2/test.tsv.gz - md5sum: ea77e8c4426d2337419905b57f1ec335 - path: output/metabat2/test.txt.gz - md5sum: 8f735aa408d6c90e5a0310e06ace7a9a + - path: output/metabat2/versions.yml + md5sum: 538c56b2df7d90580f05097218b5d5b1 From 0bf40a26bdc7cd90472956771daebabf52c68b49 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Thu, 9 Dec 2021 11:07:47 -0600 Subject: [PATCH 097/101] Update seqwish to version 0.7.2 (#1144) --- modules/seqwish/induce/main.nf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index 089f3478..6d6b33e7 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,14 +1,14 @@ -def VERSION = '0.7.1' // Version information not provided by tool on CLI +def VERSION = '0.7.2' // Version information not provided by tool on CLI process SEQWISH_INDUCE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) + conda (params.enable_conda ? 'bioconda::seqwish=0.7.2' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0' : - 'quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0' }" + 'https://depot.galaxyproject.org/singularity/seqwish:0.7.2--h2e03b76_0' : + 'quay.io/biocontainers/seqwish:0.7.2--h2e03b76_0' }" input: tuple val(meta), path(paf), path(fasta) From edbbbbf42097c0696305dd2fd6d0e1070a7dbb07 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 13 Dec 2021 09:13:24 +0100 Subject: [PATCH 098/101] fix: exclude conda/singularity images from pytest workflow output (#1162) --- .github/workflows/pytest-workflow.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 0bd892c8..cc7c9313 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -103,3 +103,5 @@ jobs: /home/runner/pytest_workflow_*/*/log.out /home/runner/pytest_workflow_*/*/log.err /home/runner/pytest_workflow_*/*/work + !/home/runner/pytest_workflow_*/*/work/conda + !/home/runner/pytest_workflow_*/*/work/singularity From 47a9cf8ecbe4de4dcb8b9cc6731fece82b934ab7 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 13 Dec 2021 14:10:29 +0000 Subject: [PATCH 099/101] Fix transcriptome staging issues on DNAnexus for rsem/prepareference (#1163) --- modules/rsem/preparereference/main.nf | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 95597b74..a5b8922a 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -12,9 +12,9 @@ process RSEM_PREPAREREFERENCE { path gtf output: - path "rsem" , emit: index - path "rsem/*transcripts.fa", emit: transcript_fasta - path "versions.yml" , emit: versions + path "rsem" , emit: index + path "*transcripts.fa", emit: transcript_fasta + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' @@ -40,6 +40,8 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . + cat <<-END_VERSIONS > versions.yml "${task.process}": rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") @@ -55,6 +57,8 @@ process RSEM_PREPAREREFERENCE { $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . 
+ cat <<-END_VERSIONS > versions.yml "${task.process}": rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") From 0fafaeebf52cc5ab554b83297ed02a48d852a848 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Mon, 13 Dec 2021 16:15:20 +0000 Subject: [PATCH 100/101] Revert PR #1147 (#1166) * Revert PR #1147 * Fix md5sum for crai file --- modules/samtools/index/main.nf | 11 ++++------- tests/modules/samtools/index/test.yml | 2 +- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index 74ae15df..db025a8f 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -11,13 +11,10 @@ process SAMTOOLS_INDEX { tuple val(meta), path(input) output: - tuple val(meta), path("*.bam" , includeInputs:true), path("*.bai") , optional:true, emit: bam_bai - tuple val(meta), path("*.bai") , optional:true, emit: bai - tuple val(meta), path("*.bam" , includeInputs:true), path("*.csi") , optional:true, emit: bam_csi - tuple val(meta), path("*.csi") , optional:true, emit: csi - tuple val(meta), path("*.cram", includeInputs:true), path("*.crai"), optional:true, emit: cram_crai - tuple val(meta), path("*.crai") , optional:true, emit: crai - path "versions.yml" , emit: versions + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.csi") , optional:true, emit: csi + tuple val(meta), path("*.crai"), optional:true, emit: crai + path "versions.yml" , emit: versions script: def args = task.ext.args ?: '' diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 6972ed65..7184be8f 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -14,7 +14,7 @@ - samtools/index files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai - md5sum: 537e3d8c937bcc4e34e1cf47cd71d484 + md5sum: 14bc3bd5c89cacc8f4541f9062429029 - name: samtools index 
test_samtools_index_csi command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config From 826a5603db5cf5b4f1e55cef9cc0b7c37d3c7e70 Mon Sep 17 00:00:00 2001 From: Mingda Jin Date: Mon, 13 Dec 2021 11:18:27 -0800 Subject: [PATCH 101/101] Stage fastq for concat in subfolders to avoid name collision issue (#1107) * Stage fastq for concat in subfolders in task workdir * Update main.nf * Update test.yml Co-authored-by: Harshil Patel --- modules/cat/fastq/main.nf | 8 ++++---- tests/modules/cat/fastq/main.nf | 22 ++++++++++++++++++++++ tests/modules/cat/fastq/test.yml | 26 +++++++++++++++++++++++--- 3 files changed, 49 insertions(+), 7 deletions(-) diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index c5ece83a..d02598e1 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -8,7 +8,7 @@ process CAT_FASTQ { 'biocontainers/biocontainers:v1.2.0_cv1' }" input: - tuple val(meta), path(reads) + tuple val(meta), path(reads, stageAs: "input*/*") output: tuple val(meta), path("*.merged.fastq.gz"), emit: reads @@ -21,7 +21,7 @@ process CAT_FASTQ { if (meta.single_end) { if (readList.size > 1) { """ - cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz + cat ${readList.join(' ')} > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -35,8 +35,8 @@ process CAT_FASTQ { def read2 = [] readList.eachWithIndex{ v, ix -> ( ix & 1 ? 
read2 : read1 ) << v } """ - cat ${read1.sort().join(' ')} > ${prefix}_1.merged.fastq.gz - cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz + cat ${read1.join(' ')} > ${prefix}_1.merged.fastq.gz + cat ${read2.join(' ')} > ${prefix}_2.merged.fastq.gz cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index c3da91d2..1ed23ce5 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -25,3 +25,25 @@ workflow test_cat_fastq_paired_end { CAT_FASTQ ( input ) } + +workflow test_cat_fastq_single_end_same_name { + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} + +workflow test_cat_fastq_paired_end_same_name { + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 89ddf331..56374060 100644 --- a/tests/modules/cat/fastq/test.yml +++ b/tests/modules/cat/fastq/test.yml @@ -5,7 +5,7 @@ - cat/fastq files: - path: ./output/cat/test.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d - name: cat fastq fastqc_paired_end command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config @@ -14,6 +14,26 @@ - cat/fastq files: - 
path: ./output/cat/test_2.merged.fastq.gz - md5sum: d2b1a836eef1058738ecab36c907c5ba + md5sum: 77c8e966e130d8c6b6ec9be52fcb2bda - path: ./output/cat/test_1.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d + +- name: cat fastq single-end-same-name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + +- name: cat fastq fastqc_paired_end_same_name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test_1.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + - path: ./output/cat/test_2.merged.fastq.gz + md5sum: fe9f266f43a6fc3dcab690a18419a56e