diff --git a/.github/workflows/nf-core-linting.yml b/.github/workflows/nf-core-linting.yml index 55b8c296..121dd865 100644 --- a/.github/workflows/nf-core-linting.yml +++ b/.github/workflows/nf-core-linting.yml @@ -7,7 +7,6 @@ on: pull_request: branches: [master] - jobs: changes: name: Check for changes @@ -25,9 +24,6 @@ jobs: lint: runs-on: ubuntu-20.04 - env: - NXF_VER: 21.04.0 - name: ${{ matrix.tags }} needs: changes if: needs.changes.outputs.modules != '[]' diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index 0b509527..cc7c9313 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -6,11 +6,8 @@ on: branches: [master] jobs: - ########### - # Modules # - ########### - module_changes: - name: Check for changes in the modules + changes: + name: Check for changes runs-on: ubuntu-latest outputs: # Expose matched filters as job 'modules' output variable @@ -23,17 +20,16 @@ jobs: with: filters: "tests/config/pytest_modules.yml" - module_test: + test: runs-on: ubuntu-20.04 - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: module_changes - if: needs.module_changes.outputs.modules != '[]' + name: ${{ matrix.tags }} ${{ matrix.profile }} + needs: changes + if: needs.changes.outputs.modules != '[]' strategy: fail-fast: false matrix: - nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.module_changes.outputs.modules) }} + tags: ["${{ fromJson(needs.changes.outputs.modules) }}"] profile: ["docker", "singularity", "conda"] env: NXF_ANSI_LOG: false @@ -63,13 +59,12 @@ jobs: - uses: actions/cache@v2 with: path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} + key: ${{ runner.os }} restore-keys: | ${{ runner.os }}-nextflow- - name: Install Nextflow env: - NXF_VER: ${{ matrix.nxf_version }} CAPSULE_LOG: none run: | wget -qO- get.nextflow.io | bash @@ -96,118 +91,17 @@ jobs: # Test the module - name: Run pytest-workflow # 
only use one thread for pytest-workflow to avoid race condition on conda cache. - run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - name: Upload logs on failure if: failure() uses: actions/upload-artifact@v2 with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} - path: | - /home/runner/pytest_workflow_*/*/.nextflow.log - /home/runner/pytest_workflow_*/*/log.out - /home/runner/pytest_workflow_*/*/log.err - /home/runner/pytest_workflow_*/*/work - - ################ - # Subworkflows # - ################ - subworkflow_changes: - name: Check for changes in the subworkflows - runs-on: ubuntu-latest - outputs: - # Expose matched filters as job 'subworkflows' output variable - subworkflows: ${{ steps.filter.outputs.changes }} - steps: - - uses: actions/checkout@v2 - - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: "tests/config/pytest_subworkflows.yml" - - subworkflow_test: - runs-on: ubuntu-20.04 - - name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }} - needs: subworkflow_changes - if: needs.subworkflow_changes.outputs.subworkflows != '[]' - strategy: - fail-fast: false - matrix: - nxf_version: ["21.04.0"] - tags: ${{ fromJson(needs.subworkflow_changes.outputs.subworkflows) }} - profile: ["docker", "singularity", "conda"] - env: - NXF_ANSI_LOG: false - steps: - - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - name: Install Python dependencies - run: python -m pip install --upgrade pip pytest-workflow - - - uses: actions/cache@v2 - 
with: - path: /usr/local/bin/nextflow - key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }} - restore-keys: | - ${{ runner.os }}-nextflow- - - - name: Install Nextflow - env: - NXF_VER: ${{ matrix.nxf_version }} - CAPSULE_LOG: none - run: | - wget -qO- get.nextflow.io | bash - sudo mv nextflow /usr/local/bin/ - - - name: Set up Singularity - if: matrix.profile == 'singularity' - uses: eWaterCycle/setup-singularity@v5 - with: - singularity-version: 3.7.1 - - - name: Setup miniconda - if: matrix.profile == 'conda' - uses: conda-incubator/setup-miniconda@v2 - with: - auto-update-conda: true - channels: conda-forge,bioconda,defaults - python-version: ${{ matrix.python-version }} - - - name: Conda clean - if: matrix.profile == 'conda' - run: conda clean -a - - # Test the module - - name: Run pytest-workflow - # only use one thread for pytest-workflow to avoid race condition on conda cache. - run: NF_CORE_MODULES_TEST=1 TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof - - - name: Upload logs on failure - if: failure() - uses: actions/upload-artifact@v2 - with: - name: logs-${{ matrix.profile }}-${{ matrix.nxf_version }} + name: logs-${{ matrix.profile }} path: | /home/runner/pytest_workflow_*/*/.nextflow.log /home/runner/pytest_workflow_*/*/log.out /home/runner/pytest_workflow_*/*/log.err /home/runner/pytest_workflow_*/*/work + !/home/runner/pytest_workflow_*/*/work/conda + !/home/runner/pytest_workflow_*/*/work/singularity diff --git a/README.md b/README.md index f25b37d9..beee42e7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # ![nf-core/modules](docs/images/nfcore-modules_logo.png) -[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.04.0-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) +[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A521.10.3-23aa62.svg?labelColor=000000)](https://www.nextflow.io/) [![run with 
conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) @@ -78,7 +78,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi nextflow.enable.dsl = 2 - include { FASTQC } from './modules/nf-core/modules/fastqc/main' addParams( options: [:] ) + include { FASTQC } from './modules/nf-core/modules/fastqc/main' ``` 5. Remove the module from the pipeline repository if required: diff --git a/main.nf b/main.nf new file mode 100644 index 00000000..de12f619 --- /dev/null +++ b/main.nf @@ -0,0 +1,3 @@ +/* + * not actually used - just a placeholder + */ diff --git a/modules/abacas/functions.nf b/modules/abacas/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/abacas/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// 
-// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/abacas/main.nf b/modules/abacas/main.nf index bc5440b1..49040214 100644 --- a/modules/abacas/main.nf +++ b/modules/abacas/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ABACAS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::abacas=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0" - } else { - container "quay.io/biocontainers/abacas:1.3.1--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/abacas:1.3.1--pl526_0' : + 'quay.io/biocontainers/abacas:1.3.1--pl526_0' }" input: tuple val(meta), path(scaffold) @@ -27,12 +16,13 @@ process ABACAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ abacas.pl \\ -r $fasta \\ -q $scaffold \\ - $options.args \\ + $args \\ -o ${prefix}.abacas mv nucmer.delta ${prefix}.abacas.nucmer.delta @@ -40,8 +30,8 @@ process ABACAS { mv nucmer.tiling ${prefix}.abacas.nucmer.tiling mv unused_contigs.out ${prefix}.abacas.unused.contigs.out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') + "${task.process}": + abacas: \$(echo \$(abacas.pl -v 2>&1) | sed 's/^.*ABACAS.//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/adapterremoval/functions.nf b/modules/adapterremoval/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/adapterremoval/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - 
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index fad3963f..0cf257ff 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ADAPTERREMOVAL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::adapterremoval=2.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0" - } else { - container "quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/adapterremoval:2.3.2--hb7ba0dd_0' : + 'quay.io/biocontainers/adapterremoval:2.3.2--hb7ba0dd_0' }" input: tuple val(meta), path(reads) @@ -26,13 +16,14 @@ process ADAPTERREMOVAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ AdapterRemoval \\ --file1 $reads \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -41,16 +32,16 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } else if (!meta.single_end && !meta.collapse) { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ - $options.args \\ + --file2 ${reads[1]} \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -60,17 +51,17 @@ process ADAPTERREMOVAL { --gzip \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") END_VERSIONS """ } else { """ AdapterRemoval \\ --file1 ${reads[0]} \\ - --file2 ${reads[0]} \\ + --file2 ${reads[1]} \\ --collapse \\ - $options.args \\ + $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ @@ -79,8 +70,8 @@ process ADAPTERREMOVAL { cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") + "${task.process}": + adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") END_VERSIONS """ } diff --git a/modules/agrvate/functions.nf b/modules/agrvate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/agrvate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/agrvate/main.nf b/modules/agrvate/main.nf index c1a6748e..aff72abc 100644 --- a/modules/agrvate/main.nf +++ b/modules/agrvate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AGRVATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::agrvate=1.0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/agrvate:1.0.1--hdfd78af_0" - } else { - container "quay.io/biocontainers/agrvate:1.0.1--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::agrvate=1.0.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/agrvate:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/agrvate:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -27,15 +16,16 @@ process AGRVATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ agrvate \\ - $options.args \\ + $args \\ -i $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') + "${task.process}": + agrvate: \$(echo \$(agrvate -v 2>&1) | sed 's/agrvate v//;') END_VERSIONS """ } diff --git a/modules/allelecounter/functions.nf b/modules/allelecounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/allelecounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/allelecounter/main.nf b/modules/allelecounter/main.nf index 8d986579..850a018f 100644 --- a/modules/allelecounter/main.nf +++ b/modules/allelecounter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ALLELECOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cancerit-allelecount=4.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0" - } else { - container "quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cancerit-allelecount:4.3.0--h41abebc_0' : + 'quay.io/biocontainers/cancerit-allelecount:4.3.0--h41abebc_0' }" input: tuple val(meta), path(input), path(input_index) @@ -28,20 +17,21 @@ process ALLELECOUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def reference_options = fasta ? "-r $fasta": "" """ alleleCounter \\ - $options.args \\ + $args \\ -l $loci \\ -b $input \\ $reference_options \\ -o ${prefix}.alleleCount cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(alleleCounter --version) + "${task.process}": + allelecounter: \$(alleleCounter --version) END_VERSIONS """ } diff --git a/modules/amps/functions.nf b/modules/amps/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/amps/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map 
args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/amps/main.nf b/modules/amps/main.nf index f34423b5..871b57c6 100644 --- a/modules/amps/main.nf +++ b/modules/amps/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process AMPS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path maltextract_results @@ -30,6 +19,7 @@ process AMPS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ postprocessing.AMPS.r \\ -r $maltextract_results \\ @@ -37,11 +27,11 @@ process AMPS { -m $filter \\ -t $task.cpus \\ -j \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') + "${task.process}": + amps: \$(echo \$(hops --version 2>&1) | sed 's/HOPS version//') END_VERSIONS """ } diff --git a/modules/arriba/functions.nf b/modules/arriba/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/arriba/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta 
?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/arriba/main.nf b/modules/arriba/main.nf index 6abae233..0fcb6ba7 100644 --- a/modules/arriba/main.nf +++ b/modules/arriba/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARRIBA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::arriba=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1" - } else { - container "quay.io/biocontainers/arriba:2.1.0--h3198e80_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/arriba:2.1.0--h3198e80_1' : + 'quay.io/biocontainers/arriba:2.1.0--h3198e80_1' }" input: tuple val(meta), path(bam) @@ -29,8 +18,9 @@ process ARRIBA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def blacklist = (options.args.contains('-b')) ? '' : '-f blacklist' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def blacklist = (args.contains('-b')) ? 
'' : '-f blacklist' """ arriba \\ -x $bam \\ @@ -39,11 +29,11 @@ process ARRIBA { -o ${prefix}.fusions.tsv \\ -O ${prefix}.fusions.discarded.tsv \\ $blacklist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') + "${task.process}": + arriba: \$(arriba -h | grep 'Version:' 2>&1 | sed 's/Version:\s//') END_VERSIONS """ } diff --git a/modules/artic/guppyplex/functions.nf b/modules/artic/guppyplex/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/guppyplex/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/guppyplex/main.nf b/modules/artic/guppyplex/main.nf index 87bd99c8..780f5111 100644 --- a/modules/artic/guppyplex/main.nf +++ b/modules/artic/guppyplex/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_GUPPYPLEX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq_dir) @@ -26,18 +15,19 @@ process ARTIC_GUPPYPLEX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ artic \\ guppyplex \\ - $options.args \\ + $args \\ --directory $fastq_dir \\ --output ${prefix}.fastq pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/artic/minion/functions.nf b/modules/artic/minion/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/artic/minion/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/artic/minion/main.nf b/modules/artic/minion/main.nf index 68474f19..ce04fcc8 100644 --- a/modules/artic/minion/main.nf +++ b/modules/artic/minion/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ARTIC_MINION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::artic=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0" - } else { - container "quay.io/biocontainers/artic:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/artic:1.2.1--py_0' : + 'quay.io/biocontainers/artic:1.2.1--py_0' }" input: tuple val(meta), path(fastq) @@ -43,20 +32,21 @@ process ARTIC_MINION { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def version = scheme_version.toString().toLowerCase().replaceAll('v','') - def fast5 = params.fast5_dir ? 
"--fast5-directory $fast5_dir" : "" - def summary = params.sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" + def fast5 = fast5_dir ? "--fast5-directory $fast5_dir" : "" + def summary = sequencing_summary ? "--sequencing-summary $sequencing_summary" : "" def model = "" - if (options.args.tokenize().contains('--medaka')) { + if (args.tokenize().contains('--medaka')) { fast5 = "" summary = "" - model = file(params.artic_minion_medaka_model).exists() ? "--medaka-model ./$medaka_model" : "--medaka-model $params.artic_minion_medaka_model" + model = file(medaka_model).exists() ? "--medaka-model ./$medaka_model" : "--medaka-model $medaka_model" } """ artic \\ minion \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --read-file $fastq \\ --scheme-directory ./primer-schemes \\ @@ -68,8 +58,8 @@ process ARTIC_MINION { $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') + "${task.process}": + artic: \$(artic --version 2>&1 | sed 's/^.*artic //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/assemblyscan/functions.nf b/modules/assemblyscan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/assemblyscan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 
= args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/assemblyscan/main.nf b/modules/assemblyscan/main.nf index 5b82f922..56541222 100644 --- a/modules/assemblyscan/main.nf +++ b/modules/assemblyscan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ASSEMBLYSCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::assembly-scan=0.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0' : + 'quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0' }" input: tuple val(meta), path(assembly) @@ -26,13 +15,14 @@ process ASSEMBLYSCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ assembly-scan $assembly > ${prefix}.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) + "${task.process}": + assemblyscan: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' ) END_VERSIONS """ } diff --git a/modules/ataqv/ataqv/main.nf b/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..20525e85 --- /dev/null +++ b/modules/ataqv/ataqv/main.nf @@ -0,0 +1,47 @@ +process ATAQV_ATAQV { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::ataqv=1.2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ataqv:1.2.1--py39ha23c084_2' : + 'quay.io/biocontainers/ataqv:1.2.1--py36hfdecbe1_2' }" + + input: + tuple val(meta), path(bam), path(bai), path(peak_file) + val organism + path tss_file + path excl_regs_file + path autosom_ref_file + + output: + tuple val(meta), path("*.ataqv.json"), emit: json + tuple val(meta), path("*.problems") , emit: problems, optional: true + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def peak = peak_file ? "--peak-file $peak_file" : '' + def tss = tss_file ? "--tss-file $tss_file" : '' + def excl_regs = excl_regs_file ? "--excluded-region-file $excl_regs_file" : '' + def autosom_ref = autosom_ref_file ? 
"--autosomal-reference-file $autosom_ref_file" : '' + """ + ataqv \\ + $args \\ + $peak \\ + $tss \\ + $excl_regs \\ + $autosom_ref \\ + --metrics-file "${prefix}.ataqv.json" \\ + --threads $task.cpus \\ + --name $prefix \\ + $organism \\ + $bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ataqv: \$( ataqv --version ) + END_VERSIONS + """ +} diff --git a/modules/ataqv/ataqv/meta.yml b/modules/ataqv/ataqv/meta.yml new file mode 100644 index 00000000..760bf95f --- /dev/null +++ b/modules/ataqv/ataqv/meta.yml @@ -0,0 +1,66 @@ +name: ataqv_ataqv +description: ataqv function of a corresponding ataqv tool +keywords: + - ataqv +tools: + - ataqv: + description: ataqv is a toolkit for measuring and comparing ATAC-seq results. It was written to help understand how well ATAC-seq assays have worked, and to make it easier to spot differences that might be caused by library prep or sequencing. + homepage: https://github.com/ParkerLab/ataqv/blob/master/README.rst + documentation: https://github.com/ParkerLab/ataqv/blob/master/README.rst + tool_dev_url: https://github.com/ParkerLab/ataqv + doi: "https://doi.org/10.1016/j.cels.2020.02.009" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file with the same prefix as bam file. Required if tss_file input is provided. + pattern: "*.bam.bai" + - peak_file: + type: file + description: A BED file of peaks called for alignments in the BAM file + pattern: "*.bed" + - organism: + type: string + description: The subject of the experiment, which determines the list of autosomes (see "Reference Genome Configuration" section at https://github.com/ParkerLab/ataqv). + - tss_file: + type: file + description: A BED file of transcription start sites for the experiment organism. 
If supplied, a TSS enrichment score will be calculated according to the ENCODE data standards. This calculation requires that the BAM file of alignments be indexed. + pattern: "*.bed" + - excl_regs_file: + type: file + description: A BED file containing excluded regions. Peaks or TSS overlapping these will be ignored. + pattern: "*.bed" + - autosom_ref_file: + type: file + description: A file containing autosomal reference names, one per line. The names must match the reference names in the alignment file exactly, or the metrics based on counts of autosomal alignments will be wrong. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - json: + type: file + description: The JSON file to which metrics will be written. + - problems: + type: file + description: If given, problematic reads will be logged to a file per read group, with names derived from the read group IDs, with ".problems" appended. If no read groups are found, the reads will be written to one file named after the BAM file. + pattern: "*.problems" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@i-pletenev" diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf new file mode 100644 index 00000000..2582dac2 --- /dev/null +++ b/modules/bakta/main.nf @@ -0,0 +1,67 @@ +process BAKTA { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::bakta=1.2.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bakta:1.2.2--pyhdfd78af_0' : + 'quay.io/biocontainers/bakta:1.2.2--pyhdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + path db + path proteins + path prodigal_tf + + output: + tuple val(meta), path("${prefix}.embl") , emit: embl + tuple val(meta), path("${prefix}.faa") , emit: faa + tuple val(meta), path("${prefix}.ffn") , emit: ffn + tuple val(meta), path("${prefix}.fna") , emit: fna + tuple val(meta), path("${prefix}.gbff") , emit: gbff + tuple val(meta), path("${prefix}.gff3") , emit: gff + tuple val(meta), path("${prefix}.hypotheticals.tsv"), emit: hypotheticals_tsv + tuple val(meta), path("${prefix}.hypotheticals.faa"), emit: hypotheticals_faa + tuple val(meta), path("${prefix}.tsv") , emit: tsv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" + def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" + def prodigal_opt = prodigal_tf ? "--prodigal-tf ${prodigal_tf[0]}" : "" + """ + bakta \\ + $args \\ + --threads $task.cpus \\ + --prefix $prefix \\ + --db $db \\ + $proteins_opt \\ + $prodigal_opt \\ + $fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ + + stub: + prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.embl + touch ${prefix}.faa + touch ${prefix}.ffn + touch ${prefix}.fna + touch ${prefix}.gbff + touch ${prefix}.gff3 + touch ${prefix}.hypotheticals.tsv + touch ${prefix}.hypotheticals.faa + touch ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bakta: \$( echo \$(bakta --version 2>&1) | sed 's/^.*bakta //' ) + END_VERSIONS + """ +} diff --git a/modules/bakta/meta.yml b/modules/bakta/meta.yml new file mode 100644 index 00000000..29e6edbe --- /dev/null +++ b/modules/bakta/meta.yml @@ -0,0 +1,85 @@ +name: bakta +description: Rapid annotation of bacterial genomes & plasmids. 
+keywords: + - annotation + - fasta + - prokaryote +tools: + - bakta: + description: Rapid & standardized annotation of bacterial genomes & plasmids. + homepage: https://github.com/oschwengers/bakta + documentation: https://github.com/oschwengers/bakta + tool_dev_url: https://github.com/oschwengers/bakta + doi: "10.1099/mgen.0.000685" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: | + FASTA file to be annotated. Has to contain at least a non-empty string dummy value. + - db: + type: file + description: | + Path to the Bakta database + - proteins: + type: file + description: FASTA file of trusted proteins to first annotate from (optional) + - prodigal_tf: + type: file + description: Training file to use for Prodigal (optional) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: annotations as simple human readable tab separated values + pattern: "*.tsv" + - gff: + type: file + description: annotations & sequences in GFF3 format + pattern: "*.gff3" + - gbff: + type: file + description: annotations & sequences in (multi) GenBank format + pattern: "*.gbff" + - embl: + type: file + description: annotations & sequences in (multi) EMBL format + pattern: "*.embl" + - fna: + type: file + description: replicon/contig DNA sequences as FASTA + pattern: "*.fna" + - faa: + type: file + description: CDS/sORF amino acid sequences as FASTA + pattern: "*.faa" + - ffn: + type: file + description: feature nucleotide sequences as FASTA + pattern: "*.ffn" + - hypotheticals_tsv: + type: file + description: further information on hypothetical protein CDS as simple human readable tab separated values + pattern: "*.hypotheticals.tsv" + 
- hypotheticals_faa: + type: file + description: hypothetical protein CDS amino acid sequences as FASTA + pattern: "*.hypotheticals.faa" + +authors: + - "@rpetit3" diff --git a/modules/bamaligncleaner/functions.nf b/modules/bamaligncleaner/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamaligncleaner/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if 
(ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamaligncleaner/main.nf b/modules/bamaligncleaner/main.nf index 720b495a..88fe21aa 100644 --- a/modules/bamaligncleaner/main.nf +++ b/modules/bamaligncleaner/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMALIGNCLEANER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamaligncleaner=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bamaligncleaner:0.2.1--pyhdfd78af_0' : + 'quay.io/biocontainers/bamaligncleaner:0.2.1--pyhdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BAMALIGNCLEANER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bamAlignCleaner \\ - $options.args \\ + $args \\ -o ${prefix}.bam \\ ${bam} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bamAlignCleaner --version | sed 's/.*version //') + "${task.process}": + bamaligncleaner: \$(bamAlignCleaner --version | sed 's/.*version //') END_VERSIONS """ } diff --git a/modules/bamtools/split/functions.nf b/modules/bamtools/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bamtools/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - 
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 86eaa5d6..8d5e5690 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BAMTOOLS_SPLIT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bamtools=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9" - } else { - container "quay.io/biocontainers/bamtools:2.5.1--h9a82719_9" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9' : + 'quay.io/biocontainers/bamtools:2.5.1--h9a82719_9' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process BAMTOOLS_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bamtools \\ split \\ -in $bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) + "${task.process}": + bamtools: \$( bamtools --version | grep -e 'bamtools' | sed 's/^.*bamtools //' ) END_VERSIONS """ } diff --git a/modules/bamutil/trimbam/main.nf b/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..9ceb2b65 --- /dev/null +++ b/modules/bamutil/trimbam/main.nf @@ -0,0 +1,34 @@ +process BAMUTIL_TRIMBAM { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::bamutil=1.0.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bamutil:1.0.15--h2e03b76_1' : + 'quay.io/biocontainers/bamutil:1.0.15--h2e03b76_1' }" + + input: + tuple val(meta), path(bam), val(trim_left), val(trim_right) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + bam \\ + trimBam \\ + $bam \\ + ${prefix}.bam \\ + $args \\ + -L $trim_left \\ + -R $trim_right + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bamutil: \$( echo \$( bam trimBam 2>&1 ) | sed 's/^Version: //;s/;.*//' ) + END_VERSIONS + """ +} diff --git a/modules/bamutil/trimbam/meta.yml b/modules/bamutil/trimbam/meta.yml new file mode 100644 index 00000000..a91ba0e1 --- /dev/null +++ b/modules/bamutil/trimbam/meta.yml @@ -0,0 +1,51 @@ +name: bamutil_trimbam +description: trims the end of reads in a SAM/BAM file, changing read ends to ‘N’ and quality to ‘!’, or by soft clipping +keywords: + - bam + - trim + - clipping + - bamUtil + - trimBam +tools: + - bamutil: + description: Programs that perform operations on SAM/BAM files, all built into a single executable, bam. + homepage: https://genome.sph.umich.edu/wiki/BamUtil + documentation: https://genome.sph.umich.edu/wiki/BamUtil:_trimBam + tool_dev_url: https://github.com/statgen/bamUtil + doi: "10.1101/gr.176552.114" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - trim_left: + type: integer + description: Number of bases to trim off the left-hand side of a read. Reverse strands are reversed before trimming. + - trim_right: + type: integer + description: Number of bases to trim off the right-hand side of a read. Reverse strands are reversed before trimming. 
+ +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Trimmed but unsorted BAM file + pattern: "*.bam" + +authors: + - "@jfy133" diff --git a/modules/bandage/image/functions.nf b/modules/bandage/image/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bandage/image/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish 
versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bandage/image/main.nf b/modules/bandage/image/main.nf index b7a30a0b..bc2a9495 100644 --- a/modules/bandage/image/main.nf +++ b/modules/bandage/image/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BANDAGE_IMAGE { tag "${meta.id}" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bandage=0.8.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2" - } else { - container "quay.io/biocontainers/bandage:0.8.1--hc9558a2_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bandage:0.8.1--hc9558a2_2' : + 'quay.io/biocontainers/bandage:0.8.1--hc9558a2_2' }" input: tuple val(meta), path(gfa) @@ -27,14 +16,15 @@ process BANDAGE_IMAGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - Bandage image $gfa ${prefix}.png $options.args - Bandage image $gfa ${prefix}.svg $options.args + Bandage image $gfa ${prefix}.png $args + Bandage image $gfa ${prefix}.svg $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + bandage: \$(echo \$(Bandage --version 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bbmap/align/functions.nf b/modules/bbmap/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 733fd4d5..ac839497 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } else { - container "quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" input: tuple val(meta), path(fastq) @@ -24,10 +13,12 @@ process BBMAP_ALIGN { output: tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.log"), emit: log path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" input = meta.single_end ? "in=${fastq}" : "in=${fastq[0]} in2=${fastq[1]}" @@ -49,13 +40,14 @@ process BBMAP_ALIGN { $db \\ $input \\ out=${prefix}.bam \\ - $options.args \\ + $args \\ threads=$task.cpus \\ - -Xmx${task.memory.toGiga()}g + -Xmx${task.memory.toGiga()}g \\ + &> ${prefix}.bbmap.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bbmap/bbduk/functions.nf b/modules/bbmap/bbduk/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbduk/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbduk/main.nf b/modules/bbmap/bbduk/main.nf index d7243fdb..79c3c306 100644 --- a/modules/bbmap/bbduk/main.nf +++ b/modules/bbmap/bbduk/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBDUK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bbmap=38.90" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1" - } else { - container "quay.io/biocontainers/bbmap:38.90--he522d1c_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.90--he522d1c_1' : + 'quay.io/biocontainers/bbmap:38.90--he522d1c_1' }" input: tuple val(meta), path(reads) @@ -27,7 +17,8 @@ process BBMAP_BBDUK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def raw = meta.single_end ? "in=${reads[0]}" : "in1=${reads[0]} in2=${reads[1]}" def trimmed = meta.single_end ? 
"out=${prefix}.fastq.gz" : "out1=${prefix}_1.fastq.gz out2=${prefix}_2.fastq.gz" def contaminants_fa = contaminants ? "ref=$contaminants" : '' @@ -38,12 +29,12 @@ process BBMAP_BBDUK { $raw \\ $trimmed \\ threads=$task.cpus \\ - $options.args \\ + $args \\ $contaminants_fa \\ &> ${prefix}.bbduk.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bbmap/bbsplit/functions.nf b/modules/bbmap/bbsplit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/bbsplit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map 
args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/bbsplit/main.nf b/modules/bbmap/bbsplit/main.nf index b2249b17..0c916dfe 100644 --- a/modules/bbmap/bbsplit/main.nf +++ b/modules/bbmap/bbsplit/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_BBSPLIT { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bbmap=38.93" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.93--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bbmap:38.93--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.93--he522d1c_0' }" input: tuple val(meta), path(reads) @@ -32,7 +21,8 @@ process BBMAP_BBSPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { @@ -54,11 +44,11 @@ process BBMAP_BBSPLIT { ${other_refs.join(' ')} \\ path=bbsplit \\ threads=$task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } else { @@ -83,11 +73,11 @@ process BBMAP_BBSPLIT { $fastq_in \\ $fastq_out \\ refstats=${prefix}.stats.txt \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh 2>&1) + "${task.process}": + bbmap: \$(bbversion.sh 2>&1) END_VERSIONS """ } diff --git a/modules/bbmap/index/functions.nf b/modules/bbmap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bbmap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from 
process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bbmap/index/main.nf b/modules/bbmap/index/main.nf index b9e52ec7..4c02f84e 100644 --- a/modules/bbmap/index/main.nf +++ b/modules/bbmap/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BBMAP_INDEX { tag "$fasta" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bbmap=38.92" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0" - } else { - container "quay.io/biocontainers/bbmap:38.92--he522d1c_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bbmap:38.92--he522d1c_0' : + 'quay.io/biocontainers/bbmap:38.92--he522d1c_0' }" input: path fasta @@ -26,16 +15,17 @@ process BBMAP_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bbmap.sh \\ ref=${fasta} \\ - $options.args \\ + $args \\ threads=$task.cpus \\ -Xmx${task.memory.toGiga()}g cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bbversion.sh) + "${task.process}": + bbmap: \$(bbversion.sh) END_VERSIONS """ } diff --git a/modules/bcftools/concat/functions.nf b/modules/bcftools/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/concat/main.nf b/modules/bcftools/concat/main.nf index 48280eea..cebd2443 100644 --- a/modules/bcftools/concat/main.nf +++ b/modules/bcftools/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0" - } else { - container "quay.io/biocontainers/bcftools:1.11--h7c999a4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.11--h7c999a4_0' : + 'quay.io/biocontainers/bcftools:1.11--h7c999a4_0' }" input: tuple val(meta), path(vcfs) @@ -26,17 +15,18 @@ process BCFTOOLS_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools concat \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcfs} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/consensus/functions.nf b/modules/bcftools/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/consensus/main.nf b/modules/bcftools/consensus/main.nf index 954b0eb8..4633790e 100644 --- a/modules/bcftools/consensus/main.nf +++ b/modules/bcftools/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' - } else { - container 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(tbi), path(fasta) @@ -26,15 +15,16 @@ process BCFTOOLS_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - cat $fasta | bcftools consensus $vcf $options.args > ${prefix}.fa + cat $fasta | bcftools consensus $vcf $args > ${prefix}.fa header=\$(head -n 1 ${prefix}.fa | sed 's/>//g') sed -i 's/\${header}/${meta.id}/g' ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/filter/functions.nf b/modules/bcftools/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/filter/main.nf b/modules/bcftools/filter/main.nf index 5323e0fb..95e0249a 100644 --- a/modules/bcftools/filter/main.nf +++ b/modules/bcftools/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_FILTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,16 +15,17 @@ process BCFTOOLS_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools filter \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/index/functions.nf b/modules/bcftools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = 
[:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/index/main.nf b/modules/bcftools/index/main.nf index d67614d8..0cdebf31 100644 --- a/modules/bcftools/index/main.nf +++ b/modules/bcftools/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_INDEX { path "versions.yml" , emit: version script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools \\ index \\ - $options.args \\ + $args \\ --threads $task.cpus \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/isec/functions.nf b/modules/bcftools/isec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/isec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/isec/main.nf b/modules/bcftools/isec/main.nf index cc3e425e..08323f28 100644 --- a/modules/bcftools/isec/main.nf +++ b/modules/bcftools/isec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_ISEC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_ISEC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools isec \\ - $options.args \\ + $args \\ -p $prefix \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/merge/functions.nf b/modules/bcftools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/merge/main.nf b/modules/bcftools/merge/main.nf index bb68f184..bfb0f162 100644 --- a/modules/bcftools/merge/main.nf +++ b/modules/bcftools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcfs), path(tbis) @@ -26,15 +15,16 @@ process BCFTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ bcftools merge -Oz \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ *.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/mpileup/functions.nf b/modules/bcftools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/mpileup/main.nf b/modules/bcftools/mpileup/main.nf index df8455a5..8a209a66 100644 --- a/modules/bcftools/mpileup/main.nf +++ b/modules/bcftools/mpileup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(bam) @@ -29,21 +18,28 @@ process BCFTOOLS_MPILEUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ echo "${meta.id}" > sample_name.list + bcftools mpileup \\ --fasta-ref $fasta \\ - $options.args \\ + $args \\ $bam \\ - | bcftools call --output-type v $options.args2 \\ + | bcftools call --output-type v $args2 \\ | bcftools reheader --samples sample_name.list \\ - | bcftools view --output-file ${prefix}.vcf.gz --output-type z $options.args3 + | bcftools view --output-file ${prefix}.vcf.gz --output-type z $args3 + tabix -p vcf -f ${prefix}.vcf.gz + bcftools stats ${prefix}.vcf.gz > ${prefix}.bcftools_stats.txt + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/norm/functions.nf b/modules/bcftools/norm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/norm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// 
Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index 7e506e49..95da56db 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_NORM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -27,18 +16,19 @@ process BCFTOOLS_NORM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bcftools norm \\ --fasta-ref ${fasta} \\ --output ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/query/functions.nf b/modules/bcftools/query/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/query/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/query/main.nf b/modules/bcftools/query/main.nf index dae8bbc4..d1098f99 100644 --- a/modules/bcftools/query/main.nf +++ b/modules/bcftools/query/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_QUERY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,7 +18,8 @@ process BCFTOOLS_QUERY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? "--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" @@ -40,12 +30,12 @@ process BCFTOOLS_QUERY { ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/reheader/functions.nf b/modules/bcftools/reheader/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/reheader/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using 
$task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/reheader/main.nf b/modules/bcftools/reheader/main.nf index 953a8adb..018431a9 100644 --- a/modules/bcftools/reheader/main.nf +++ b/modules/bcftools/reheader/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_REHEADER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -28,7 +17,8 @@ process BCFTOOLS_REHEADER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def update_sequences = fai ? 
"-f $fai" : "" def new_header = header ? "-h $header" : "" """ @@ -36,14 +26,14 @@ process BCFTOOLS_REHEADER { reheader \\ $update_sequences \\ $new_header \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ${prefix}.vcf.gz \\ $vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/stats/functions.nf b/modules/bcftools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/stats/main.nf b/modules/bcftools/stats/main.nf index 31bed814..67e8dca7 100644 --- a/modules/bcftools/stats/main.nf +++ b/modules/bcftools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_STATS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bcftools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf) @@ -26,12 +15,13 @@ process BCFTOOLS_STATS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - bcftools stats $options.args $vcf > ${prefix}.bcftools_stats.txt + bcftools stats $args $vcf > ${prefix}.bcftools_stats.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bcftools/view/functions.nf b/modules/bcftools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bcftools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index ef72f081..f37c1ab9 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BCFTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bcftools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0" - } else { - container "quay.io/biocontainers/bcftools:1.13--h3a49de5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bcftools:1.13--h3a49de5_0' : + 'quay.io/biocontainers/bcftools:1.13--h3a49de5_0' }" input: tuple val(meta), path(vcf), path(index) @@ -29,25 +18,24 @@ process BCFTOOLS_VIEW { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def regions_file = regions ? 
"--regions-file ${regions}" : "" def targets_file = targets ? "--targets-file ${targets}" : "" def samples_file = samples ? "--samples-file ${samples}" : "" - - """ bcftools view \\ --output ${prefix}.vcf.gz \\ ${regions_file} \\ ${targets_file} \\ ${samples_file} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ ${vcf} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bedtools/bamtobed/functions.nf b/modules/bedtools/bamtobed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/bamtobed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/bamtobed/main.nf b/modules/bedtools/bamtobed/main.nf index 71c439d3..98d9ea2f 100644 --- a/modules/bedtools/bamtobed/main.nf +++ b/modules/bedtools/bamtobed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_BAMTOBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda 
(params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,18 @@ process BEDTOOLS_BAMTOBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ bamtobed \\ - $options.args \\ + $args \\ -i $bam \\ | bedtools sort > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/complement/functions.nf b/modules/bedtools/complement/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/complement/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def 
Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/complement/main.nf b/modules/bedtools/complement/main.nf index 77214c64..3146827c 100644 --- a/modules/bedtools/complement/main.nf +++ b/modules/bedtools/complement/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_COMPLEMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_COMPLEMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ complement \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/genomecov/functions.nf b/modules/bedtools/genomecov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/genomecov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/genomecov/main.nf b/modules/bedtools/genomecov/main.nf index 52f37f23..ca491e75 100644 --- a/modules/bedtools/genomecov/main.nf +++ b/modules/bedtools/genomecov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GENOMECOV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals), val(scale) @@ -28,15 +17,14 @@ process BEDTOOLS_GENOMECOV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args_token = options.args.tokenize() - def args = options.args + def args = task.ext.args ?: '' + def args_list = args.tokenize() args += (scale > 0 && scale != 1) ? " -scale $scale" : "" - - if (!args_token.contains('-bg') && (scale > 0 && scale != 1)) { + if (!args_list.contains('-bg') && (scale > 0 && scale != 1)) { args += " -bg" } + def prefix = task.ext.prefix ?: "${meta.id}" if (intervals.name =~ /\.bam/) { """ bedtools \\ @@ -46,8 +34,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } else { @@ -60,8 +48,8 @@ process BEDTOOLS_GENOMECOV { > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/getfasta/functions.nf b/modules/bedtools/getfasta/functions.nf deleted file mode 100644 index 
85628ee0..00000000 --- a/modules/bedtools/getfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/getfasta/main.nf b/modules/bedtools/getfasta/main.nf index b27f6183..5a283e94 100644 --- a/modules/bedtools/getfasta/main.nf +++ b/modules/bedtools/getfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_GETFASTA { tag "$bed" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: path bed @@ -27,18 +16,19 @@ process BEDTOOLS_GETFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${bed.baseName}${options.suffix}" : "${bed.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${bed.baseName}" """ bedtools \\ getfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/intersect/functions.nf b/modules/bedtools/intersect/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/intersect/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/intersect/main.nf b/modules/bedtools/intersect/main.nf index 1ab0a8b2..afb0d056 100644 --- a/modules/bedtools/intersect/main.nf +++ b/modules/bedtools/intersect/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_INTERSECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -27,18 +16,19 @@ process BEDTOOLS_INTERSECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ intersect \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/makewindows/functions.nf b/modules/bedtools/makewindows/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/makewindows/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/makewindows/main.nf b/modules/bedtools/makewindows/main.nf index c9f863d0..2414393c 100644 --- a/modules/bedtools/makewindows/main.nf +++ b/modules/bedtools/makewindows/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MAKEWINDOWS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--h7d7f7ad_1' : + 'quay.io/biocontainers/bedtools:2.30.0--h7d7f7ad_1' }" input: tuple val(meta), path(regions) @@ -27,18 +16,19 @@ process BEDTOOLS_MAKEWINDOWS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def arg_input = use_bed ? 
"-b $regions" : "-g $regions" """ bedtools \\ makewindows \\ ${arg_input} \\ - $options.args \\ + $args \\ > ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/maskfasta/functions.nf b/modules/bedtools/maskfasta/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/maskfasta/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir 
?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/maskfasta/main.nf b/modules/bedtools/maskfasta/main.nf index 8ee33d7a..7eeb4c7d 100644 --- a/modules/bedtools/maskfasta/main.nf +++ b/modules/bedtools/maskfasta/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MASKFASTA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,18 @@ process BEDTOOLS_MASKFASTA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ maskfasta \\ - $options.args \\ + $args \\ -fi $fasta \\ -bed $bed \\ -fo ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/merge/functions.nf b/modules/bedtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/merge/main.nf b/modules/bedtools/merge/main.nf index 92a59f9e..5f1da95b 100644 --- a/modules/bedtools/merge/main.nf +++ b/modules/bedtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process BEDTOOLS_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ merge \\ -i $bed \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/slop/functions.nf b/modules/bedtools/slop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/slop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/slop/main.nf b/modules/bedtools/slop/main.nf index 4b412b1f..9d8633ec 100644 --- a/modules/bedtools/slop/main.nf +++ b/modules/bedtools/slop/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SLOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(bed) @@ -27,18 +16,19 @@ process BEDTOOLS_SLOP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ slop \\ -i $bed \\ -g $sizes \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ diff --git a/modules/bedtools/sort/functions.nf b/modules/bedtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/sort/main.nf b/modules/bedtools/sort/main.nf index 4a51c4b2..1ed95a57 100644 --- a/modules/bedtools/sort/main.nf +++ b/modules/bedtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals) @@ -27,17 +16,18 @@ process BEDTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ sort \\ -i $intervals \\ - $options.args \\ + $args \\ > ${prefix}.${extension} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bedtools/subtract/functions.nf b/modules/bedtools/subtract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bedtools/subtract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bedtools/subtract/main.nf b/modules/bedtools/subtract/main.nf index 54a12bf4..b2efefe5 100644 --- a/modules/bedtools/subtract/main.nf +++ b/modules/bedtools/subtract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BEDTOOLS_SUBTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0" - } else { - container "quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0' : + 'quay.io/biocontainers/bedtools:2.30.0--hc088bd4_0' }" input: tuple val(meta), path(intervals1), path(intervals2) @@ -26,18 +15,19 @@ process BEDTOOLS_SUBTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedtools \\ subtract \\ -a $intervals1 \\ -b $intervals2 \\ - $options.args \\ + $args \\ > ${prefix}.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bedtools --version | sed -e "s/bedtools v//g") + "${task.process}": + bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") END_VERSIONS """ } diff --git a/modules/bismark/align/functions.nf b/modules/bismark/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/align/main.nf b/modules/bismark/align/main.nf index aa4879ba..e490b48c 100644 --- a/modules/bismark/align/main.nf +++ b/modules/bismark/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(reads) @@ -29,18 +18,19 @@ process BISMARK_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def fastq = meta.single_end ? 
reads : "-1 ${reads[0]} -2 ${reads[1]}" """ bismark \\ $fastq \\ - $options.args \\ + $args \\ --genome $index \\ --bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/deduplicate/functions.nf b/modules/bismark/deduplicate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/deduplicate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/deduplicate/main.nf b/modules/bismark/deduplicate/main.nf index c3ff27d6..16c624f1 100644 --- a/modules/bismark/deduplicate/main.nf +++ b/modules/bismark/deduplicate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_DEDUPLICATE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process BISMARK_DEDUPLICATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def seqtype = meta.single_end ? '-s' : '-p' """ deduplicate_bismark \\ - $options.args \\ + $args \\ $seqtype \\ --bam $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/genomepreparation/functions.nf b/modules/bismark/genomepreparation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/genomepreparation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core 
modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/genomepreparation/main.nf b/modules/bismark/genomepreparation/main.nf index 0a86173d..e096b2b8 100644 --- a/modules/bismark/genomepreparation/main.nf +++ b/modules/bismark/genomepreparation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_GENOMEPREPARATION { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path fasta, stageAs: "BismarkIndex/*" @@ -26,14 +15,15 @@ process BISMARK_GENOMEPREPARATION { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark_genome_preparation \\ - $options.args \\ + $args \\ BismarkIndex cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/methylationextractor/functions.nf b/modules/bismark/methylationextractor/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/methylationextractor/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/methylationextractor/main.nf b/modules/bismark/methylationextractor/main.nf index 5e89e6f8..d99c2b5e 100644 --- a/modules/bismark/methylationextractor/main.nf +++ b/modules/bismark/methylationextractor/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_METHYLATIONEXTRACTOR { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> 
saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(bam) @@ -31,6 +20,7 @@ process BISMARK_METHYLATIONEXTRACTOR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def seqtype = meta.single_end ? '-s' : '-p' """ bismark_methylation_extractor \\ @@ -39,12 +29,12 @@ process BISMARK_METHYLATIONEXTRACTOR { --gzip \\ --report \\ $seqtype \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/report/functions.nf b/modules/bismark/report/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/report/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function 
to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/report/main.nf b/modules/bismark/report/main.nf index 70c6ba3b..f828ecd8 100644 --- a/modules/bismark/report/main.nf +++ b/modules/bismark/report/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_REPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: tuple val(meta), path(align_report), path(dedup_report), path(splitting_report), path(mbias) @@ -26,12 +15,13 @@ process BISMARK_REPORT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - bismark2report $options.args + bismark2report $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/bismark/summary/functions.nf b/modules/bismark/summary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bismark/summary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } 
// Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bismark/summary/main.nf b/modules/bismark/summary/main.nf index 3d5f294e..72dba72e 100644 --- a/modules/bismark/summary/main.nf +++ b/modules/bismark/summary/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BISMARK_SUMMARY { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda 
(params.enable_conda ? "bioconda::bismark=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bismark:0.23.0--0" - } else { - container "quay.io/biocontainers/bismark:0.23.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bismark:0.23.0--0' : + 'quay.io/biocontainers/bismark:0.23.0--0' }" input: path(bam) @@ -29,12 +18,13 @@ process BISMARK_SUMMARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bismark2summary cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') + "${task.process}": + bismark: \$(echo \$(bismark -v 2>&1) | sed 's/^.*Bismark Version: v//; s/Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/blast/blastn/functions.nf b/modules/blast/blastn/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/blastn/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - 
options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/blastn/main.nf b/modules/blast/blastn/main.nf index 0d65f1d0..3a0bafe0 100644 --- a/modules/blast/blastn/main.nf +++ b/modules/blast/blastn/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_BLASTN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process BLAST_BLASTN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.ndb" | sed 's/.ndb//'` blastn \\ -num_threads $task.cpus \\ -db \$DB \\ -query $fasta \\ - $options.args \\ + $args \\ -out ${prefix}.blastn.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/blast/makeblastdb/functions.nf b/modules/blast/makeblastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/blast/makeblastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - 
return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/blast/makeblastdb/main.nf b/modules/blast/makeblastdb/main.nf index 0538e0db..b4c426a4 100644 --- a/modules/blast/makeblastdb/main.nf +++ b/modules/blast/makeblastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BLAST_MAKEBLASTDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::blast=2.12.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' - } else { - container 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/blast:2.12.0--pl5262h3289130_0' : + 'quay.io/biocontainers/blast:2.12.0--pl5262h3289130_0' }" input: path fasta @@ -26,15 +15,16 @@ process BLAST_MAKEBLASTDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ makeblastdb \\ -in $fasta \\ - $options.args + $args mkdir blast_db mv ${fasta}* blast_db cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') + "${task.process}": + blast: \$(blastn -version 2>&1 | sed 's/^.*blastn: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie/align/functions.nf b/modules/bowtie/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index 764b5be2..b25b5e21 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::bowtie=1.3.0 bioconda::samtools=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } else { - container 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' : + 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def unaligned = params.save_unaligned ? "--un ${prefix}.unmapped.fastq" : '' def endedness = meta.single_end ? "$reads" : "-1 ${reads[0]} -2 ${reads[1]}" """ @@ -40,10 +31,10 @@ process BOWTIE_ALIGN { -x \$INDEX \\ -q \\ $unaligned \\ - $options.args \\ + $args \\ $endedness \\ 2> ${prefix}.out \\ - | samtools view $options.args2 -@ $task.cpus -bS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq ]; then gzip ${prefix}.unmapped.fastq @@ -54,8 +45,8 @@ process BOWTIE_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bowtie/build/functions.nf b/modules/bowtie/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie/build/main.nf b/modules/bowtie/build/main.nf index 1b83541b..dbbc8efa 100644 --- a/modules/bowtie/build/main.nf +++ b/modules/bowtie/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie=1.3.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' - } else { - container 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bowtie:1.3.0--py38hed8969a_1' : + 'quay.io/biocontainers/bowtie:1.3.0--py38hed8969a_1' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie bowtie-build --threads $task.cpus $fasta bowtie/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') + "${task.process}": + bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bowtie2/align/functions.nf b/modules/bowtie2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = 
paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 6f923951..41c8a6bf 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::bowtie2=2.4.2 bioconda::samtools=1.11 conda-forge::pigz=2.3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } else { - container "quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' : + 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:577a697be67b5ae9b16f637fd723b8263a3898b3-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,9 @@ process BOWTIE2_ALIGN { tuple val(meta), path('*fastq.gz'), optional:true, emit: fastq script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def unaligned = params.save_unaligned ? 
"--un-gz ${prefix}.unmapped.fastq.gz" : '' """ @@ -39,13 +30,13 @@ process BOWTIE2_ALIGN { -U $reads \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS @@ -60,9 +51,9 @@ process BOWTIE2_ALIGN { -2 ${reads[1]} \\ --threads $task.cpus \\ $unaligned \\ - $options.args \\ + $args \\ 2> ${prefix}.bowtie2.log \\ - | samtools view -@ $task.cpus $options.args2 -bhS -o ${prefix}.bam - + | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam - if [ -f ${prefix}.unmapped.fastq.1.gz ]; then mv ${prefix}.unmapped.fastq.1.gz ${prefix}.unmapped_1.fastq.gz @@ -72,8 +63,8 @@ process BOWTIE2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS diff --git a/modules/bowtie2/build/functions.nf b/modules/bowtie2/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bowtie2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name 
of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bowtie2/build/main.nf b/modules/bowtie2/build/main.nf index bc95eea8..c0cbcd79 100644 --- a/modules/bowtie2/build/main.nf +++ b/modules/bowtie2/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BOWTIE2_BUILD { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' - } else { - container 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' : + 'quay.io/biocontainers/bowtie2:2.4.4--py36hd4290be_0' }" input: path fasta @@ -26,12 +15,13 @@ process BOWTIE2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bowtie2 - bowtie2-build $options.args --threads $task.cpus $fasta bowtie2/${fasta.baseName} + bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') + "${task.process}": + bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/bwa/aln/functions.nf b/modules/bwa/aln/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/aln/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/aln/main.nf b/modules/bwa/aln/main.nf index 07135aea..992e25de 100644 --- a/modules/bwa/aln/main.nf +++ b/modules/bwa/aln/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_ALN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), 
meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8" - } else { - container "quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--h5bf99c6_8' : + 'quay.io/biocontainers/bwa:0.7.17--h5bf99c6_8' }" input: tuple val(meta), path(reads) @@ -27,22 +16,23 @@ process BWA_ALN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.sai \\ \$INDEX \\ ${reads} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } else { @@ -50,22 +40,22 @@ process BWA_ALN { INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.1.sai \\ \$INDEX \\ ${reads[0]} bwa aln \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f ${prefix}.2.sai \\ \$INDEX \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/index/functions.nf b/modules/bwa/index/functions.nf deleted file mode 100644 
index 85628ee0..00000000 --- a/modules/bwa/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? 
"${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/index/main.nf b/modules/bwa/index/main.nf index 479431ed..89102737 100644 --- a/modules/bwa/index/main.nf +++ b/modules/bwa/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwa=0.7.17" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7" - } else { - container "quay.io/biocontainers/bwa:0.7.17--hed695b0_7" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bwa:0.7.17--hed695b0_7' : + 'quay.io/biocontainers/bwa:0.7.17--hed695b0_7' }" input: path fasta @@ -26,17 +15,18 @@ process BWA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwa bwa \\ index \\ - $options.args \\ - $fasta \\ - -p bwa/${fasta.baseName} + $args \\ + -p bwa/${fasta.baseName} \\ + $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') END_VERSIONS """ } diff --git a/modules/bwa/mem/functions.nf b/modules/bwa/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index b6a548d7..9695bd2d 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -1,48 +1,41 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 
'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/mem/meta.yml b/modules/bwa/mem/meta.yml index 61eaddef..c7c28f19 100644 --- a/modules/bwa/mem/meta.yml +++ b/modules/bwa/mem/meta.yml @@ -32,6 +32,10 @@ input: type: file description: BWA genome index files pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/modules/bwa/sampe/functions.nf b/modules/bwa/sampe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/sampe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = 
args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 38127793..0b5ec255 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa sampe \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwa/samse/functions.nf b/modules/bwa/samse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwa/samse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: 
'' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 68fa95c7..bee06bc8 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWA_SAMSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } else { - container "quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" input: tuple val(meta), path(reads), path(sai) @@ -27,22 +16,23 @@ process BWA_SAMSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-r ${meta.read_group}" : "" """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa samse \\ - $options.args \\ + $args \\ $read_group \\ \$INDEX \\ $sai \\ $reads | samtools sort -@ ${task.cpus - 1} -O bam - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') + "${task.process}": + bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwamem2/index/functions.nf b/modules/bwamem2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index 5732017f..e00538c9 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0" - } else { - container "quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bwa-mem2:2.2.1--he513fc3_0' : + 'quay.io/biocontainers/bwa-mem2:2.2.1--he513fc3_0' }" input: path fasta @@ -26,16 +15,17 @@ process BWAMEM2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ mkdir bwamem2 bwa-mem2 \\ index \\ - $options.args \\ + $args \\ $fasta -p bwamem2/${fasta} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ } diff --git a/modules/bwamem2/mem/functions.nf b/modules/bwamem2/mem/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwamem2/mem/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index f88d840f..6d4d8028 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -1,49 +1,42 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMEM2_MEM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } else { - container "quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' : + 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' }" input: tuple val(meta), path(reads) path index + val sort_bam output: tuple val(meta), path("*.bam"), emit: bam path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" + def samtools_command = sort_bam ? 
'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa-mem2 \\ mem \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools $samtools_command $args2 -@ $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/bwamem2/mem/meta.yml b/modules/bwamem2/mem/meta.yml index 58a35e08..71e83759 100644 --- a/modules/bwamem2/mem/meta.yml +++ b/modules/bwamem2/mem/meta.yml @@ -11,9 +11,9 @@ keywords: tools: - bwa: description: | - BWA is a software package for mapping DNA sequences against + BWA-mem2 is a software package for mapping DNA sequences against a large reference genome, such as the human genome. 
- homepage: http://bio-bwa.sourceforge.net/ + homepage: https://github.com/bwa-mem2/bwa-mem2 documentation: http://www.htslib.org/doc/samtools.html arxiv: arXiv:1303.3997 licence: ['MIT'] @@ -31,7 +31,11 @@ input: - index: type: file description: BWA genome index files - pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}" + pattern: "Directory containing BWA index *.{0132,amb,ann,bwt.2bit.64,pac}" + - sort_bam: + type: boolean + description: use samtools sort (true) or samtools view (false) + pattern: "true or false" output: - bam: type: file diff --git a/modules/bwameth/align/functions.nf b/modules/bwameth/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} 
- -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/align/main.nf b/modules/bwameth/align/main.nf index 9b1d2b86..0bcd9bac 100644 --- a/modules/bwameth/align/main.nf +++ b/modules/bwameth/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: tuple val(meta), path(reads) @@ -27,22 +16,28 @@ process BWAMETH_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_group = meta.read_group ? "-R ${meta.read_group}" : "" """ INDEX=`find -L ${index} -name "*.bwameth.c2t" | sed 's/.bwameth.c2t//'` + # Modify the timestamps so that bwameth doesn't complain about building the index + # See https://github.com/nf-core/methylseq/pull/217 + touch -c -- * + bwameth.py \\ - $options.args \\ + $args \\ $read_group \\ -t $task.cpus \\ --reference \$INDEX \\ $reads \\ - | samtools view $options.args2 -@ $task.cpus -bhS -o ${prefix}.bam - + | samtools view $args2 -@ $task.cpus -bhS -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/bwameth/index/functions.nf b/modules/bwameth/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/bwameth/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def 
getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/bwameth/index/main.nf b/modules/bwameth/index/main.nf index 68fb33d4..f5b8ff59 100644 --- a/modules/bwameth/index/main.nf +++ b/modules/bwameth/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process BWAMETH_INDEX { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::bwameth=0.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1" - } else { - container "quay.io/biocontainers/bwameth:0.2.2--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bwameth:0.2.2--py_1' : + 'quay.io/biocontainers/bwameth:0.2.2--py_1' }" input: path fasta, stageAs: "bwameth/*" @@ -26,12 +15,13 @@ process BWAMETH_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ bwameth.py index $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") + "${task.process}": + bwameth: \$(echo \$(bwameth.py --version 2>&1) | cut -f2 -d" ") END_VERSIONS """ } diff --git a/modules/cat/cat/functions.nf b/modules/cat/cat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/cat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - 
-// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index dac301cb..0c087270 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_CAT { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"conda-forge::pigz=2.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pigz:2.3.4" - } else { - container "quay.io/biocontainers/pigz:2.3.4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pigz:2.3.4' : + 'quay.io/biocontainers/pigz:2.3.4' }" input: path files_in @@ -26,6 +15,8 @@ process CAT_CAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def file_list = files_in.collect { it.toString() } if (file_list.size > 1) { @@ -39,16 +30,16 @@ process CAT_CAT { def in_zip = file_list[0].endsWith('.gz') def out_zip = file_out.endsWith('.gz') def command1 = (in_zip && !out_zip) ? 'zcat' : 'cat' - def command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $options.args2" : '' + def command2 = (!in_zip && out_zip) ? 
"| pigz -c -p $task.cpus $args2" : '' """ $command1 \\ - $options.args \\ + $args \\ ${file_list.join(' ')} \\ $command2 \\ > $file_out cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/cat/fastq/functions.nf b/modules/cat/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cat/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index 538915a7..d02598e1 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -1,41 +1,31 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CAT_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'merged_fastq', meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: - tuple val(meta), path(reads) + tuple val(meta), path(reads, stageAs: "input*/*") output: tuple val(meta), path("*.merged.fastq.gz"), emit: reads path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def readList = reads.collect{ it.toString() } if (meta.single_end) { if (readList.size > 1) { """ - cat ${readList.sort().join(' ')} > ${prefix}.merged.fastq.gz + cat ${readList.join(' ')} > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } @@ -45,12 +35,12 @@ process CAT_FASTQ { def read2 = [] readList.eachWithIndex{ v, ix -> ( ix & 1 ? 
read2 : read1 ) << v } """ - cat ${read1.sort().join(' ')} > ${prefix}_1.merged.fastq.gz - cat ${read2.sort().join(' ')} > ${prefix}_2.merged.fastq.gz + cat ${read1.join(' ')} > ${prefix}_1.merged.fastq.gz + cat ${read2.join(' ')} > ${prefix}_2.merged.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') + "${task.process}": + cat: \$(echo \$(cat --version 2>&1) | sed 's/^.*coreutils) //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/cellranger/.gitignore b/modules/cellranger/.gitignore new file mode 100644 index 00000000..9f8cb0f5 --- /dev/null +++ b/modules/cellranger/.gitignore @@ -0,0 +1 @@ +cellranger-*.tar.gz diff --git a/modules/cellranger/Dockerfile b/modules/cellranger/Dockerfile index aced4233..e9437bf6 100644 --- a/modules/cellranger/Dockerfile +++ b/modules/cellranger/Dockerfile @@ -4,7 +4,7 @@ LABEL authors="Gisela Gabernet " \ # Disclaimer: this container is not provided nor supported by 10x Genomics. # Install procps and clean apt cache -RUN apt-get update \ +RUN apt-get update --allow-releaseinfo-change \ && apt-get install -y procps \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* diff --git a/modules/cellranger/readme.md b/modules/cellranger/README.md similarity index 100% rename from modules/cellranger/readme.md rename to modules/cellranger/README.md diff --git a/modules/cellranger/count/main.nf b/modules/cellranger/count/main.nf new file mode 100644 index 00000000..be3f512a --- /dev/null +++ b/modules/cellranger/count/main.nf @@ -0,0 +1,49 @@ +process CELLRANGER_COUNT { + tag "$meta.gem" + label 'process_high' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ }
+ container "nfcore/cellranger:6.0.2"
+
+ input:
+ tuple val(meta), path(reads)
+ path reference
+
+ output:
+ path("sample-${meta.gem}/outs/*"), emit: outs
+ path "versions.yml" , emit: versions
+
+ script:
+ def args = task.ext.args ?: ''
+ def sample_arg = meta.samples.unique().join(",")
+ def reference_name = reference.name
+ """
+ cellranger \\
+ count \\
+ --id='sample-${meta.gem}' \\
+ --fastqs=. \\
+ --transcriptome=$reference_name \\
+ --sample=$sample_arg \\
+ --localcores=$task.cpus \\
+ --localmem=${task.memory.toGiga()} \\
+ $args
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' )
+ END_VERSIONS
+ """
+
+ stub:
+ """
+ mkdir -p "sample-${meta.gem}/outs/"
+ touch sample-${meta.gem}/outs/fake_file.txt
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' )
+ END_VERSIONS
+ """
+}
diff --git a/modules/cellranger/count/meta.yml b/modules/cellranger/count/meta.yml
new file mode 100644
index 00000000..e4647c98
--- /dev/null
+++ b/modules/cellranger/count/meta.yml
@@ -0,0 +1,40 @@
+name: cellranger_count
+description: Module to use Cell Ranger's pipelines to analyze sequencing data produced from Chromium Single Cell Gene Expression.
+keywords:
+ - align
+ - count
+ - reference
+tools:
+ - cellranger:
+ description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more.
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger +output: + - outs: + type: file + description: Files containing the outputs of Cell Ranger + pattern: "sample-${meta.gem}/outs/*" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkfastq/main.nf b/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..14d68665 --- /dev/null +++ b/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKFASTQ { + tag "mkfastq" + label 'process_medium' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ }
+ container "litd/docker-cellranger:v6.1.1" // FIXME Add bcl2fastq to nf-core docker image
+
+ input:
+ path bcl
+ path csv
+
+ output:
+ path "versions.yml", emit: versions
+ path "*.fastq.gz" , emit: fastq
+
+ script:
+ def args = task.ext.args ?: ''
+ """
+ cellranger mkfastq --id=${bcl.getSimpleName()} \
+ --run=$bcl \
+ --csv=$csv \
+ $args
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+ cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' )
+ END_VERSIONS
+ """
+}
diff --git a/modules/cellranger/mkfastq/meta.yml b/modules/cellranger/mkfastq/meta.yml
new file mode 100644
index 00000000..e288fb8c
--- /dev/null
+++ b/modules/cellranger/mkfastq/meta.yml
@@ -0,0 +1,38 @@
+name: cellranger_mkfastq
+description: Module to create fastqs needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkfastq command.
+keywords:
+ - reference
+ - mkfastq
+ - fastq
+ - illumina
+ - bcl2fastq
+tools:
+ - cellranger:
+ description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more.
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - bcl: + type: file + description: Base call files + pattern: "*.bcl.bgzf" + - csv: + type: file + description: Sample sheet + pattern: "*.csv" +output: + - fastq: + type: file + description: Unaligned FastQ files + pattern: "*.fastq.gz" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" + - "@RHReynolds" diff --git a/modules/cellranger/mkgtf/main.nf b/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..4db274d7 --- /dev/null +++ b/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,31 @@ +process CELLRANGER_MKGTF { + tag "$gtf" + label 'process_low' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." 
+ } + container "nfcore/cellranger:6.0.2" + + input: + path gtf + + output: + path "*.filtered.gtf", emit: gtf + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + cellranger \\ + mkgtf \\ + $gtf \\ + ${gtf.baseName}.filtered.gtf \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/cellranger/mkgtf/meta.yml b/modules/cellranger/mkgtf/meta.yml new file mode 100644 index 00000000..c160072f --- /dev/null +++ b/modules/cellranger/mkgtf/meta.yml @@ -0,0 +1,31 @@ +name: cellranger_mkgtf +description: Module to build a filtered gtf needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkgtf command. +keywords: + - reference + - mkref + - index +tools: + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
+ homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA +input: + - gtf: + type: file + description: + pattern: "*.gtf" +output: + - gtf: + type: folder + description: gtf transcriptome file + pattern: "*.filtered.gtf" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" +authors: + - "@ggabernet" + - "@Emiller88" diff --git a/modules/cellranger/mkref/functions.nf b/modules/cellranger/mkref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cellranger/mkref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // 
Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cellranger/mkref/main.nf b/modules/cellranger/mkref/main.nf index 22ad66ba..c5d83ac9 100644 --- a/modules/cellranger/mkref/main.nf +++ b/modules/cellranger/mkref/main.nf @@ -1,15 +1,6 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CELLRANGER_MKREF { tag 'mkref' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], 
publish_by_meta:[]) } if (params.enable_conda) { exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers." @@ -19,22 +10,24 @@ process CELLRANGER_MKREF { input: path fasta path gtf - val(reference_name) + val reference_name output: - path "versions.yml" , emit: versions path "${reference_name}", emit: reference + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - cellranger mkref \\ - --genome=${reference_name} \\ - --fasta=${fasta} \\ - --genes=${gtf} + cellranger \\ + mkref \\ + --genome=$reference_name \\ + --fasta=$fasta \\ + --genes=$gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) + "${task.process}": + cellranger: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' ) END_VERSIONS """ } diff --git a/modules/cellranger/mkref/meta.yml b/modules/cellranger/mkref/meta.yml index 9b849af7..06bf5b93 100644 --- a/modules/cellranger/mkref/meta.yml +++ b/modules/cellranger/mkref/meta.yml @@ -1,39 +1,37 @@ name: cellranger_mkref description: Module to build the reference needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkref command. keywords: - - reference - - mkref - - index + - reference + - mkref + - index tools: - - cellranger: - description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. 
- homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger - documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov - doi: "" - licence: 10x Genomics EULA - + - cellranger: + description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more. + homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger + documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov + doi: "" + licence: 10x Genomics EULA input: - - fasta: - type: file - description: fasta genome file - pattern: "*.{fasta,fa}" - - gtf: - type: file - description: gtf transcriptome file - pattern: "*.gtf" - - reference_name: - type: val - description: name to give the reference folder - pattern: str - + - fasta: + type: file + description: fasta genome file + pattern: "*.{fasta,fa}" + - gtf: + type: file + description: gtf transcriptome file + pattern: "*.gtf" + - reference_name: + type: val + description: name to give the reference folder + pattern: str output: - - versions: - type: file - description: File containing software version - pattern: "versions.yml" - - reference: - type: folder - description: Folder containing all the reference indices needed by Cell Ranger + - reference: + type: folder + description: Folder containing all the reference indices needed by Cell Ranger + - versions: + type: file + description: File containing software version + pattern: "versions.yml" 
authors: - "@ggabernet" diff --git a/modules/checkm/lineagewf/functions.nf b/modules/checkm/lineagewf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/checkm/lineagewf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/checkm/lineagewf/main.nf b/modules/checkm/lineagewf/main.nf index e655e5f5..992b165e 100644 --- a/modules/checkm/lineagewf/main.nf +++ b/modules/checkm/lineagewf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CHECKM_LINEAGEWF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::checkm-genome=1.1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1" - } else { - container "quay.io/biocontainers/checkm-genome:1.1.3--py_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/checkm-genome:1.1.3--py_1' : + 'quay.io/biocontainers/checkm-genome:1.1.3--py_1' }" input: tuple val(meta), path(fasta) @@ -28,7 +17,8 @@ process CHECKM_LINEAGEWF { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ checkm \\ lineage_wf \\ @@ -37,13 +27,13 @@ process CHECKM_LINEAGEWF { --tab_table \\ --pplacer_threads $task.cpus \\ -x $fasta_ext \\ - $options.args \\ + $args \\ . \\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) + "${task.process}": + checkm: \$( checkm 2>&1 | grep '...:::' | sed 's/.*CheckM v//;s/ .*//' ) END_VERSIONS """ } diff --git a/modules/chromap/chromap/functions.nf b/modules/chromap/chromap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/chromap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up 
and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 9826eed1..4a7f0097 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1' // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: tuple val(meta), path(reads) @@ -37,34 +26,37 @@ process CHROMAP_CHROMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def args_list = args.tokenize() - def file_extension = options.args.contains("--SAM") ? 'sam' : options.args.contains("--TagAlign")? 'tagAlign' : options.args.contains("--pairs")? 'pairs' : 'bed' + def file_extension = args.contains("--SAM") ? 'sam' : args.contains("--TagAlign")? 'tagAlign' : args.contains("--pairs")? 'pairs' : 'bed' if (barcodes) { - args << "-b ${barcodes.join(',')}" + args_list << "-b ${barcodes.join(',')}" if (whitelist) { - args << "--barcode-whitelist $whitelist" + args_list << "--barcode-whitelist $whitelist" } } if (chr_order) { - args << "--chr-order $chr_order" + args_list << "--chr-order $chr_order" } if (pairs_chr_order){ - args << "--pairs-natural-chr-order $pairs_chr_order" + args_list << "--pairs-natural-chr-order $pairs_chr_order" } - def final_args = args.join(' ') + def final_args = args_list.join(' ') def compression_cmds = "gzip ${prefix}.${file_extension}" - if (options.args.contains("--SAM")) { + if (args.contains("--SAM")) { compression_cmds = """ - samtools view $options.args2 -@ ${task.cpus} -bh \\ + samtools view $args2 -@ $task.cpus -bh \\ -o ${prefix}.bam ${prefix}.${file_extension} rm ${prefix}.${file_extension} """ } if (meta.single_end) { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -74,13 +66,14 @@ process CHROMAP_CHROMAP { $compression_cmds 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } else { """ - chromap ${final_args} \\ + chromap \\ + $final_args \\ -t $task.cpus \\ -x $index \\ -r $fasta \\ @@ -91,8 +84,8 @@ process CHROMAP_CHROMAP { $compression_cmds cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION END_VERSIONS """ } diff --git a/modules/chromap/index/functions.nf b/modules/chromap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/chromap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index efe85733..cafeca2f 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = 0.1 // No version information printed +def VERSION = '0.1' // Version information not provided by tool on CLI process CHROMAP_INDEX { tag '$fasta' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::chromap=0.1 bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } else { - container "quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:2cad7c5aa775241887eff8714259714a39baf016-0' }" input: path fasta @@ -28,18 +17,19 @@ process CHROMAP_INDEX { path "versions.yml", emit: versions script: - def prefix = fasta.baseName + def args = task.ext.args ?: '' + def prefix = fasta.baseName """ chromap \\ -i \\ - $options.args \\ + $args \\ -t $task.cpus \\ -r $fasta \\ -o ${prefix}.index cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + chromap: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/clonalframeml/main.nf b/modules/clonalframeml/main.nf new file mode 100644 index 00000000..db647a38 --- /dev/null +++ b/modules/clonalframeml/main.nf @@ -0,0 +1,37 @@ +process CLONALFRAMEML { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::clonalframeml=1.12" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/clonalframeml:1.12--h7d875b9_1' : + 'quay.io/biocontainers/clonalframeml:1.12--h7d875b9_1' }" + + input: + tuple val(meta), path(newick), path(msa) + + output: + tuple val(meta), path("*.emsim.txt") , emit: emsim, optional: true + tuple val(meta), path("*.em.txt") , emit: em + tuple val(meta), path("*.importation_status.txt") , emit: status + tuple val(meta), path("*.labelled_tree.newick") , emit: newick + tuple val(meta), path("*.ML_sequence.fasta") , emit: fasta + tuple val(meta), path("*.position_cross_reference.txt"), emit: pos_ref + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + ClonalFrameML \\ + $newick \\ + <(gzip -cdf $msa) \\ + $prefix \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + clonalframeml: \$( echo \$(ClonalFrameML -version 2>&1) | sed 's/^.*ClonalFrameML v//' ) + END_VERSIONS + """ +} diff --git a/modules/clonalframeml/meta.yml b/modules/clonalframeml/meta.yml new file mode 100644 index 00000000..874a04be --- /dev/null +++ b/modules/clonalframeml/meta.yml @@ -0,0 +1,67 @@ +name: clonalframeml +description: Predict recombination events in bacterial genomes +keywords: + - fasta + - multiple sequence alignment + - recombination +tools: + - clonalframeml: + description: Efficient inferencing of recombination in bacterial genomes + homepage: https://github.com/xavierdidelot/ClonalFrameML + documentation: https://github.com/xavierdidelot/clonalframeml/wiki + tool_dev_url: https://github.com/xavierdidelot/ClonalFrameML + doi: "10.1371/journal.pcbi.1004041" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - msa: + type: file + description: A multiple sequence alignment in FASTA format + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - newick: + type: file + description: A Newick formatted tree based on multiple sequence alignment + pattern: "*.{newick,treefile,dnd}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - emsim: + type: file + description: Bootstrapped values for the three parameters R/theta, nu and delta + pattern: "*.emsim.txt" + - em: + type: file + description: Point estimates for R/theta, nu, delta and the branch lengths + pattern: "*.em.txt" + - fasta: + type: file + description: Sequence reconstructed by maximum likelihood + pattern: "*.ML_sequence.fasta" + - newick: + type: file + description: Tree with all nodes labelled + pattern: "*.labelled_tree.newick" + - pos_ref: + type: file + description: CSV mapping input sequence files to the sequences in the *.ML_sequence.fasta + pattern: "*.position_cross_reference.txt" + - status: + type: file + description: List of reconstructed recombination events + pattern: "*.importation_status.txt" + +authors: + - "@rpetit3" diff --git a/modules/cmseq/polymut/main.nf b/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..47e86f0c --- /dev/null +++ b/modules/cmseq/polymut/main.nf @@ -0,0 +1,37 @@ +def VERSION = '1.0.4' // Version information not provided by tool on CLI + +process CMSEQ_POLYMUT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::cmseq=1.0.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/cmseq:1.0.4--pyhb7b1952_0' : + 'quay.io/biocontainers/cmseq:1.0.4--pyhb7b1952_0' }" + + input: + tuple val(meta), path(bam), path(bai), path(gff), path(fasta) + + output: + tuple val(meta), path("*.txt"), emit: polymut + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def fasta_refid = fasta ? "-c $fasta" : "" + def sortindex = bai ? "" : "--sortindex" + """ + polymut.py \\ + $args \\ + $sortindex \\ + $fasta_refid \\ + --gff_file $gff \\ + $bam > ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cmseq: $VERSION + END_VERSIONS + """ +} diff --git a/modules/cmseq/polymut/meta.yml b/modules/cmseq/polymut/meta.yml new file mode 100644 index 00000000..49e6b519 --- /dev/null +++ b/modules/cmseq/polymut/meta.yml @@ -0,0 +1,61 @@ +name: cmseq_polymut +description: Calculates polymorphic site rates over protein coding genes +keywords: + - polymut + - polymorphic + - mags + - assembly + - polymorphic sites + - estimation + - protein coding genes + - cmseq + - bam + - coverage +tools: + - cmseq: + description: Set of utilities on sequences and BAM files + homepage: https://github.com/SegataLab/cmseq + documentation: https://github.com/SegataLab/cmseq + tool_dev_url: https://github.com/SegataLab/cmseq + licence: ['MIT License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - gff: + type: file + description: GFF file used to extract protein-coding genes + pattern: "*.gff" + - fasta: + type: file + description: Optional fasta file to run on a subset of references in the BAM file. 
+ pattern: .{fa,fasta,fas,fna} + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - polymut: + type: file + description: Polymut report in `.txt` format. + pattern: "*.txt" + +authors: + - "@maxibor" diff --git a/modules/cnvkit/batch/main.nf b/modules/cnvkit/batch/main.nf new file mode 100644 index 00000000..811cb409 --- /dev/null +++ b/modules/cnvkit/batch/main.nf @@ -0,0 +1,52 @@ +process CNVKIT_BATCH { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 'bioconda::cnvkit=0.9.9' : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0' : + 'quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0' }" + + input: + tuple val(meta), path(tumor), path(normal) + path fasta + path targets + path reference + + output: + tuple val(meta), path("*.bed"), emit: bed + tuple val(meta), path("*.cnn"), emit: cnn, optional: true + tuple val(meta), path("*.cnr"), emit: cnr, optional: true + tuple val(meta), path("*.cns"), emit: cns, optional: true + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def normal_args = normal ? "--normal $normal" : "" + def fasta_args = fasta ? "--fasta $fasta" : "" + def reference_args = reference ? "--reference $reference" : "" + + def target_args = "" + if (args.contains("--method wgs") || args.contains("-m wgs")) { + target_args = targets ? 
"--targets $targets" : "" + } + else { + target_args = "--targets $targets" + } + """ + cnvkit.py \\ + batch \\ + $tumor \\ + $normal_args \\ + $fasta_args \\ + $reference_args \\ + $target_args \\ + --processes $task.cpus \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvkit: \$(cnvkit.py version | sed -e "s/cnvkit v//g") + END_VERSIONS + """ +} diff --git a/modules/cnvkit/meta.yml b/modules/cnvkit/batch/meta.yml old mode 100755 new mode 100644 similarity index 89% rename from modules/cnvkit/meta.yml rename to modules/cnvkit/batch/meta.yml index 3e760d16..0d263041 --- a/modules/cnvkit/meta.yml +++ b/modules/cnvkit/batch/meta.yml @@ -1,4 +1,4 @@ -name: cnvkit +name: cnvkit_batch description: Copy number variant detection from high-throughput sequencing data keywords: - bam @@ -38,14 +38,14 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - tumourbam: + - tumour: type: file description: | - Input tumour sample bam file - - normalbam: + Input tumour sample bam file (or cram) + - normal: type: file description: | - Input normal sample bam file + Input normal sample bam file (or cram) - fasta: type: file description: | @@ -54,6 +54,10 @@ input: type: file description: | Input target bed file + - reference: + type: file + description: | + Input reference cnn-file (only for germline and tumor-only running) output: - meta: type: map @@ -85,4 +89,5 @@ authors: - "@KevinMenden" - "@MaxUlysse" - "@drpatelh" - + - "@fbdtemme" + - "@lassefolkersen" diff --git a/modules/cnvkit/functions.nf b/modules/cnvkit/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/cnvkit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// 
Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cnvkit/main.nf b/modules/cnvkit/main.nf deleted file mode 100755 index 27c8bb0d..00000000 --- a/modules/cnvkit/main.nf +++ /dev/null @@ -1,48 +0,0 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -process CNVKIT { - tag "$meta.id" - label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - - conda (params.enable_conda ? 
'bioconda::cnvkit=0.9.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cnvkit:0.9.9--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/cnvkit:0.9.9--pyhdfd78af_0" - } - - input: - tuple val(meta), path(tumourbam), path(normalbam) - path fasta - path targetfile - - output: - tuple val(meta), path("*.bed"), emit: bed - tuple val(meta), path("*.cnn"), emit: cnn - tuple val(meta), path("*.cnr"), emit: cnr - tuple val(meta), path("*.cns"), emit: cns - path "versions.yml" , emit: versions - - script: - """ - cnvkit.py \\ - batch \\ - $tumourbam \\ - --normal $normalbam\\ - --fasta $fasta \\ - --targets $targetfile \\ - $options.args - - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cnvkit.py version | sed -e "s/cnvkit v//g") - END_VERSIONS - """ -} diff --git a/modules/cooler/cload/main.nf b/modules/cooler/cload/main.nf new file mode 100644 index 00000000..d8bdc031 --- /dev/null +++ b/modules/cooler/cload/main.nf @@ -0,0 +1,37 @@ +process COOLER_CLOAD { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" + + input: + tuple val(meta), path(pairs), path(index) + val cool_bin + path chromsizes + + output: + tuple val(meta), val(cool_bin), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def nproc = args.contains('pairix') || args.contains('tabix')? 
"--nproc $task.cpus" : '' + + """ + cooler cload \\ + $args \\ + $nproc \\ + ${chromsizes}:${cool_bin} \\ + $pairs \\ + ${prefix}.${cool_bin}.cool + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/cload/meta.yml b/modules/cooler/cload/meta.yml new file mode 100644 index 00000000..8ac75911 --- /dev/null +++ b/modules/cooler/cload/meta.yml @@ -0,0 +1,52 @@ +name: cooler_cload +description: Create a cooler from genomic pairs and bins +keywords: + - cool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - pairs: + type: file + description: Path to contacts (i.e. read pairs) file. + - index: + type: file + description: Path to index file of the contacts. + - cool_bin: + type: value + description: Bins size in bp + - chromsizes: + type: file + description: Path to a chromsizes file. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Output COOL file path + pattern: "*.cool" + - cool_bin: + type: value + description: Bins size in bp + +authors: + - "@jianhong" diff --git a/modules/cooler/digest/functions.nf b/modules/cooler/digest/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/digest/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/digest/main.nf b/modules/cooler/digest/main.nf index 5728b649..9658ec31 100644 --- a/modules/cooler/digest/main.nf +++ b/modules/cooler/digest/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DIGEST { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: path fasta @@ -28,17 +17,18 @@ process COOLER_DIGEST { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ cooler digest \\ - $options.args \\ + $args \\ -o "${fasta.baseName}_${enzyme.replaceAll(/[^0-9a-zA-Z]+/, '_')}.bed" \\ $chromsizes \\ $fasta \\ $enzyme cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/functions.nf b/modules/cooler/dump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cooler/dump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cooler/dump/main.nf b/modules/cooler/dump/main.nf index 2028f5f0..a438acc8 100644 --- a/modules/cooler/dump/main.nf +++ b/modules/cooler/dump/main.nf @@ -1,41 +1,33 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process COOLER_DUMP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0" - } else { - container "quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" input: tuple val(meta), path(cool) + val resolution output: tuple val(meta), path("*.bedpe"), emit: bedpe - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = resolution ? "::$resolution" : "" """ cooler dump \\ - $options.args \\ + $args \\ -o ${prefix}.bedpe \\ - $cool + $cool$suffix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cooler --version 2>&1 | sed 's/cooler, version //') + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') END_VERSIONS """ } diff --git a/modules/cooler/dump/meta.yml b/modules/cooler/dump/meta.yml index 659b06a1..a9d1afd5 100644 --- a/modules/cooler/dump/meta.yml +++ b/modules/cooler/dump/meta.yml @@ -21,6 +21,9 @@ input: type: file description: Path to COOL file pattern: "*.{cool,mcool}" + - resolution: + type: value + description: Resolution output: - meta: diff --git a/modules/cooler/merge/main.nf b/modules/cooler/merge/main.nf new file mode 100644 index 00000000..b1814b68 --- /dev/null +++ b/modules/cooler/merge/main.nf @@ -0,0 +1,31 @@ +process COOLER_MERGE { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.cool"), emit: cool + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + cooler merge \\ + $args \\ + ${prefix}.cool \\ + ${cool} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/merge/meta.yml b/modules/cooler/merge/meta.yml new file mode 100644 index 00000000..f5c0a733 --- /dev/null +++ b/modules/cooler/merge/meta.yml @@ -0,0 +1,41 @@ +name: cooler_merge +description: Merge multiple coolers with identical axes +keywords: + - merge +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - cool: + type: file + description: Path to COOL file + pattern: "*.cool" + +authors: + - "@jianhong" diff --git a/modules/cooler/zoomify/main.nf b/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..226d4114 --- /dev/null +++ b/modules/cooler/zoomify/main.nf @@ -0,0 +1,32 @@ +process COOLER_ZOOMIFY { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::cooler=0.8.11" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cooler:0.8.11--pyh3252c3a_0' : + 'quay.io/biocontainers/cooler:0.8.11--pyh3252c3a_0' }" + + input: + tuple val(meta), path(cool) + + output: + tuple val(meta), path("*.mcool"), emit: mcool + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + cooler zoomify \\ + $args \\ + -n $task.cpus \\ + -o ${prefix}.mcool \\ + $cool + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cooler: \$(cooler --version 2>&1 | sed 's/cooler, version //') + END_VERSIONS + """ +} diff --git a/modules/cooler/zoomify/meta.yml b/modules/cooler/zoomify/meta.yml new file mode 100644 index 00000000..74bdbf44 --- /dev/null +++ b/modules/cooler/zoomify/meta.yml @@ -0,0 +1,41 @@ +name: cooler_zoomify +description: Generate a multi-resolution cooler file by coarsening +keywords: + - mcool +tools: + - cooler: + description: Sparse binary format for genomic interaction matrices + homepage: https://cooler.readthedocs.io/en/latest/index.html + documentation: https://cooler.readthedocs.io/en/latest/index.html + tool_dev_url: https://github.com/open2c/cooler + doi: "10.1093/bioinformatics/btz540" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample 
information + e.g. [ id:'test', single_end:false ] + - cool: + type: file + description: Path to COOL file + pattern: "*.{cool,mcool}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - mcool: + type: file + description: Output mcool file + pattern: "*.mcool" + +authors: + - "@jianhong" diff --git a/modules/csvtk/concat/functions.nf b/modules/csvtk/concat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/csvtk/concat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/csvtk/concat/main.nf b/modules/csvtk/concat/main.nf index 194b1e14..94b1925a 100644 --- a/modules/csvtk/concat/main.nf +++ b/modules/csvtk/concat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CSVTK_CONCAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::csvtk=0.23.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" input: tuple val(meta), path(csv) @@ -28,14 +17,15 @@ process CSVTK_CONCAT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def delimiter = in_format == "tsv" ? "\t" : (in_format == "csv" ? "," : in_format) def out_delimiter = out_format == "tsv" ? "\t" : (out_format == "csv" ? "," : out_format) out_extension = out_format == "tsv" ? 'tsv' : 'csv' """ csvtk \\ concat \\ - $options.args \\ + $args \\ --num-cpus $task.cpus \\ --delimiter "${delimiter}" \\ --out-delimiter "${out_delimiter}" \\ @@ -43,7 +33,7 @@ process CSVTK_CONCAT { $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": csvtk: \$(echo \$( csvtk version | sed -e "s/csvtk v//g" )) END_VERSIONS """ diff --git a/modules/csvtk/split/main.nf b/modules/csvtk/split/main.nf new file mode 100644 index 00000000..52ab7ec7 --- /dev/null +++ b/modules/csvtk/split/main.nf @@ -0,0 +1,40 @@ +process CSVTK_SPLIT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::csvtk=0.23.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/csvtk:0.23.0--h9ee0642_0' : + 'quay.io/biocontainers/csvtk:0.23.0--h9ee0642_0' }" + + input: + tuple val(meta), path(csv) + val in_format + val out_format + + output: + tuple val(meta), path("*.${out_extension}"), emit: split_csv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def delimiter = in_format == "tsv" ? "--tabs" : (in_format == "csv" ? "--delimiter ',' " : in_format) + def out_delimiter = out_format == "tsv" ? "--out-tabs" : (out_format == "csv" ? "--out-delimiter ',' " : out_format) + out_extension = out_format == "tsv" ? 'tsv' : 'csv' + """ + sed -i.bak '/^##/d' $csv + csvtk \\ + split \\ + $args \\ + --num-cpus $task.cpus \\ + $delimiter \\ + $out_delimiter \\ + $csv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + csvtk: \$(echo \$( csvtk version | sed -e 's/csvtk v//g' )) + END_VERSIONS + """ +} diff --git a/modules/csvtk/split/meta.yml b/modules/csvtk/split/meta.yml new file mode 100644 index 00000000..45b71d14 --- /dev/null +++ b/modules/csvtk/split/meta.yml @@ -0,0 +1,52 @@ +name: csvtk_split +description: Splits CSV/TSV into multiple files according to column values +keywords: + - split + - csv + - tsv +tools: + - csvtk: + description: + CSVTK is a cross-platform, efficient and practical CSV/TSV toolkit + that allows rapid data investigation and manipulation. + homepage: https://bioinf.shenwei.me/csvtk/ + documentation: https://bioinf.shenwei.me/csvtk/ + tool_dev_url: https://github.com/shenwei356/csvtk + doi: "" + licence: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - csv: + type: file + description: CSV/TSV file + pattern: "*.{csv,tsv}" + - in_format: + type: string + description: Input format (csv, tab, or a delimiting character) + pattern: "*" + - out_format: + type: string + description: Output format (csv, tab, or a delimiting character) + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - split_csv: + type: file + description: Split CSV/TSV file + pattern: "*.{csv,tsv}" + +authors: + - "@SusiJo" diff --git a/modules/custom/dumpsoftwareversions/functions.nf b/modules/custom/dumpsoftwareversions/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/custom/dumpsoftwareversions/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // 
Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/custom/dumpsoftwareversions/main.nf b/modules/custom/dumpsoftwareversions/main.nf index faf2073f..934bb467 100644 --- a/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/custom/dumpsoftwareversions/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:'pipeline_info', meta:[:], publish_by_meta:[]) } // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container conda (params.enable_conda ? "bioconda::multiqc=1.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path versions @@ -27,80 +16,6 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { path "versions.yml" , emit: versions script: - """ - #!/usr/bin/env python - - import yaml - import platform - from textwrap import dedent - - def _make_versions_html(versions): - html = [ - dedent( - '''\\ - - - - - - - - - - ''' - ) - ] - for process, tmp_versions in sorted(versions.items()): - html.append("") - for i, (tool, version) in enumerate(sorted(tmp_versions.items())): - html.append( - dedent( - f'''\\ - - - - - - ''' - ) - ) - html.append("") - html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") - return "\\n".join(html) - - module_versions = {} - module_versions["${getProcessName(task.process)}"] = { - 'python': platform.python_version(), - 'yaml': yaml.__version__ - } - - with open("$versions") as f: - workflow_versions = yaml.load(f, Loader=yaml.BaseLoader) | module_versions - - workflow_versions["Workflow"] = { - "Nextflow": "$workflow.nextflow.version", - "$workflow.manifest.name": "$workflow.manifest.version" - } - - versions_mqc = { - 'id': 'software_versions', - 'section_name': '${workflow.manifest.name} Software Versions', - 'section_href': 'https://github.com/${workflow.manifest.name}', - 'plot_type': 'html', - 'description': 'are collected at run time from the software output.', - 'data': _make_versions_html(workflow_versions) - } - - with open("software_versions.yml", 'w') as f: - yaml.dump(workflow_versions, f, default_flow_style=False) - with open("software_versions_mqc.yml", 'w') as f: - yaml.dump(versions_mqc, f, default_flow_style=False) - - with open('versions.yml', 'w') as f: - yaml.dump(module_versions, f, default_flow_style=False) - """ + def args = task.ext.args ?: '' + template 'dumpsoftwareversions.py' } diff --git a/modules/custom/dumpsoftwareversions/meta.yml b/modules/custom/dumpsoftwareversions/meta.yml index c8310e35..5b5b8a60 100644 --- a/modules/custom/dumpsoftwareversions/meta.yml +++ b/modules/custom/dumpsoftwareversions/meta.yml @@ -31,3 +31,4 @@ output: authors: - "@drpatelh" + - "@grst" diff --git a/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py new file mode 100644 index 00000000..d1390392 --- /dev/null +++ b/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +import yaml +import platform +from textwrap import dedent + + +def _make_versions_html(versions): + html = [ + dedent( + """\\ + + + + + + + + + + """ + ) + ] + for process, tmp_versions in 
sorted(versions.items()): + html.append("") + for i, (tool, version) in enumerate(sorted(tmp_versions.items())): + html.append( + dedent( + f"""\\ + + + + + + """ + ) + ) + html.append("") + html.append("
Process Name Software Version
{process if (i == 0) else ''}{tool}{version}
") + return "\\n".join(html) + + +versions_this_module = {} +versions_this_module["${task.process}"] = { + "python": platform.python_version(), + "yaml": yaml.__version__, +} + +with open("$versions") as f: + versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module + +# aggregate versions by the module name (derived from fully-qualified process name) +versions_by_module = {} +for process, process_versions in versions_by_process.items(): + module = process.split(":")[-1] + try: + assert versions_by_module[module] == process_versions, ( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) + except KeyError: + versions_by_module[module] = process_versions + +versions_by_module["Workflow"] = { + "Nextflow": "$workflow.nextflow.version", + "$workflow.manifest.name": "$workflow.manifest.version", +} + +versions_mqc = { + "id": "software_versions", + "section_name": "${workflow.manifest.name} Software Versions", + "section_href": "https://github.com/${workflow.manifest.name}", + "plot_type": "html", + "description": "are collected at run time from the software output.", + "data": _make_versions_html(versions_by_module), +} + +with open("software_versions.yml", "w") as f: + yaml.dump(versions_by_module, f, default_flow_style=False) +with open("software_versions_mqc.yml", "w") as f: + yaml.dump(versions_mqc, f, default_flow_style=False) + +with open("versions.yml", "w") as f: + yaml.dump(versions_this_module, f, default_flow_style=False) diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf new file mode 100644 index 00000000..270b3f48 --- /dev/null +++ b/modules/custom/getchromsizes/main.nf @@ -0,0 +1,29 @@ +process CUSTOM_GETCHROMSIZES { + tag "$fasta" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + + input: + path fasta + + output: + path '*.sizes' , emit: sizes + path '*.fai' , emit: fai + path "versions.yml", emit: versions + + script: + def args = task.ext.args ?: '' + """ + samtools faidx $fasta + cut -f 1,2 ${fasta}.fai > ${fasta}.sizes + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + custom: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/custom/getchromsizes/meta.yml b/modules/custom/getchromsizes/meta.yml new file mode 100644 index 00000000..eb1db4bb --- /dev/null +++ b/modules/custom/getchromsizes/meta.yml @@ -0,0 +1,39 @@ +name: custom_getchromsizes +description: Generates a FASTA file of chromosome sizes and a fasta index file +keywords: + - fasta + - chromosome + - indexing +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files + homepage: http://www.htslib.org/ + documentation: http://www.htslib.org/doc/samtools.html + tool_dev_url: https://github.com/samtools/samtools + doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] + +input: + - fasta: + type: file + description: FASTA file + pattern: "*.{fasta}" + +output: + - sizes: + type: file + description: File containing chromosome lengths + pattern: "*.{sizes}" + - fai: + type: file + description: FASTA index file + pattern: "*.{fai}" + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + + +authors: + - "@tamara-hodgetts" + - "@chris-cheshire" diff --git a/modules/cutadapt/functions.nf b/modules/cutadapt/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/cutadapt/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in 
nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/cutadapt/main.nf b/modules/cutadapt/main.nf index 32faf2cf..89105715 100644 --- a/modules/cutadapt/main.nf +++ b/modules/cutadapt/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process CUTADAPT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::cutadapt=3.4' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' - } else { - container 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cutadapt:3.4--py39h38f01e4_1' : + 'quay.io/biocontainers/cutadapt:3.4--py37h73a75cf_1' }" input: tuple val(meta), path(reads) @@ -27,18 +16,19 @@ process CUTADAPT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def trimmed = meta.single_end ? 
"-o ${prefix}.trim.fastq.gz" : "-o ${prefix}_1.trim.fastq.gz -p ${prefix}_2.trim.fastq.gz" """ cutadapt \\ --cores $task.cpus \\ - $options.args \\ + $args \\ $trimmed \\ $reads \\ > ${prefix}.cutadapt.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cutadapt --version) + "${task.process}": + cutadapt: \$(cutadapt --version) END_VERSIONS """ } diff --git a/modules/damageprofiler/functions.nf b/modules/damageprofiler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/damageprofiler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/damageprofiler/main.nf b/modules/damageprofiler/main.nf index 3800a305..23eb9397 100644 --- a/modules/damageprofiler/main.nf +++ b/modules/damageprofiler/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DAMAGEPROFILER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::damageprofiler=1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2" - } else { - container "quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/damageprofiler:1.1--hdfd78af_2' : + 'quay.io/biocontainers/damageprofiler:1.1--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -29,23 +18,21 @@ process DAMAGEPROFILER { path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "-r $fasta" : "" def species_list = specieslist ? "-sf $specieslist" : "" - """ damageprofiler \\ - -i $bam \\ - -o $prefix/ \\ - $options.args \\ - $reference \\ - $species_list + -i $bam \\ + -o $prefix/ \\ + $args \\ + $reference \\ + $species_list cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(damageprofiler -v | sed 's/^DamageProfiler v//') + "${task.process}": + damageprofiler: \$(damageprofiler -v | sed 's/^DamageProfiler v//') END_VERSIONS """ - } diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..722f6c55 --- /dev/null +++ b/modules/dastool/dastool/main.nf @@ -0,0 +1,64 @@ +process DASTOOL_DASTOOL { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" + + input: + tuple val(meta), path(contigs), path(bins) + path(proteins) + path(db_directory) + val(search_engine) + + output: + tuple val(meta), path("*.log") , emit: log + tuple val(meta), path("*_summary.txt") , emit: summary + tuple val(meta), path("*_DASTool_scaffolds2bin.txt") , emit: scaffolds2bin + tuple val(meta), path("*.eval") , optional: true, emit: eval + tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins + tuple val(meta), path("*.pdf") , optional: true, emit: pdfs + tuple val(meta), path("*.proteins.faa") , optional: true, emit: fasta_proteins + tuple val(meta), path("*.archaea.scg") , optional: true, emit: fasta_archaea_scg + tuple val(meta), path("*.bacteria.scg") , optional: true, emit: fasta_bacteria_scg + tuple val(meta), path("*.seqlength") , optional: true, emit: seqlength + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def bin_list = bins instanceof List ? bins.join(",") : "$bins" + def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" + def db_dir = db_directory ? "--db_directory $db_directory" : "" + def clean_contigs = contigs.toString() - ".gz" + def decompress_contigs = contigs.toString() == clean_contigs ? "" : "gunzip -q -f $contigs" + def decompress_proteins = proteins ? "gunzip -f $proteins" : "" + def clean_proteins = proteins ? proteins.toString() - ".gz" : "" + def proteins_pred = proteins ? "--proteins $clean_proteins" : "" + + if (! search_engine) { + log.info('[DAS_Tool] Default search engine (USEARCH) is proprietary software and not available in bioconda. 
Using DIAMOND as alternative.') + } + + """ + $decompress_proteins + $decompress_contigs + + DAS_Tool \\ + $args \\ + $proteins_pred \\ + $db_dir \\ + $engine \\ + -t $task.cpus \\ + --bins $bin_list \\ + -c $clean_contigs \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/dastool/meta.yml b/modules/dastool/dastool/meta.yml new file mode 100644 index 00000000..12d31e9f --- /dev/null +++ b/modules/dastool/dastool/meta.yml @@ -0,0 +1,100 @@ +name: dastool_dastool +description: DAS Tool binning step. +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - contigs: + type: file + description: fasta file + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - bins: + type: file + description: "Scaffolds2bin tabular file generated with dastool/scaffolds2bin" + pattern: "*.scaffolds2bin.tsv" + - proteins: + type: file + description: Predicted proteins in prodigal fasta format (>scaffoldID_geneNo) + pattern: "*.{fa.gz,fas.gz,fasta.gz}" + - db_directory: + type: file + description: (optional) Directory of single copy gene database. + - search_engine: + type: val + description: Engine used for single copy gene identification. 
USEARCH is not supported due to it being proprietary [blast/diamond] + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - version: + type: file + description: File containing software version + pattern: "versions.yml" + - log: + type: file + description: Log file of the run + pattern: "*.log" + - summary: + type: file + description: Summary of output bins including quality and completeness estimates + pattern: "*summary.txt" + - scaffolds2bin: + type: file + description: Scaffolds to bin file of output bins + pattern: "*.scaffolds2bin.txt" + - eval: + type: file + description: Quality and completeness estimates of input bin sets + pattern: "*.eval" + - pdfs: + type: file + description: Plots showing the amount of high quality bins and score distribution of bins per method + pattern: "*.pdf" + - fasta_proteins: + type: file + description: Output from prodigal if not already supplied + pattern: "*.proteins.faa" + - fasta_archaea_scg: + type: file + description: Results of archaeal single-copy-gene prediction + pattern: "*.archaea.scg" + - fasta_bacteria_scg: + type: file + description: Results of bacterial single-copy-gene prediction + pattern: "*.bacteria.scg" + - seqlength: + type: file + description: Summary of contig lengths + pattern: "*.seqlength" + +authors: + - "@maxibor" + - "@jfy133" diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..09f800bb --- /dev/null +++ b/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,37 @@ +process DASTOOL_SCAFFOLDS2BIN { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : + 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + val(extension) + + output: + tuple val(meta), path("*.tsv"), emit: scaffolds2bin + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def file_extension = extension ? extension : "fasta" + + """ + gunzip -f *.${file_extension}.gz + + Fasta_to_Scaffolds2Bin.sh \\ + $args \\ + -i . \\ + -e $file_extension \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/scaffolds2bin/meta.yml b/modules/dastool/scaffolds2bin/meta.yml new file mode 100644 index 00000000..f41a3cf2 --- /dev/null +++ b/modules/dastool/scaffolds2bin/meta.yml @@ -0,0 +1,58 @@ +name: dastool_scaffolds2bin +description: Helper script to convert a set of bins in fasta format to tabular scaffolds2bin format +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta of list of fasta files recommended to be gathered via with .collect() of bins + pattern: "*.{fa,fas,fasta}" + - binner: + type: val + description: Name of the binning software (optional) + - extension: + type: val + description: Fasta file extension (fa | fas | fasta | ...) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - scaffolds2bin: + type: file + description: tabular scaffolds2bin file for DAS tool input + pattern: "*.scaffolds2bin.tsv" + +authors: + - "@maxibor" diff --git a/modules/dedup/functions.nf b/modules/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // 
Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dedup/main.nf b/modules/dedup/main.nf index 62d720f6..8b4bdc37 100644 --- a/modules/dedup/main.nf +++ b/modules/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEDUP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::dedup=0.12.8" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1" - } else { - container "quay.io/biocontainers/dedup:0.12.8--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1' : + 'quay.io/biocontainers/dedup:0.12.8--hdfd78af_1' }" input: tuple val(meta), path(bam) @@ -29,18 +18,19 @@ process DEDUP { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ dedup \\ -Xmx${task.memory.toGiga()}g \\ -i $bam \\ -o . \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') + "${task.process}": + dedup: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//') END_VERSIONS """ diff --git a/modules/deeptools/computematrix/functions.nf b/modules/deeptools/computematrix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/computematrix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/computematrix/main.nf b/modules/deeptools/computematrix/main.nf index 9fffdb8e..70be934b 100644 --- a/modules/deeptools/computematrix/main.nf +++ b/modules/deeptools/computematrix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_COMPUTEMATRIX { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bigwig) @@ -28,10 +17,11 @@ process DEEPTOOLS_COMPUTEMATRIX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ computeMatrix \\ - $options.args \\ + $args \\ --regionsFileName $bed \\ --scoreFileName $bigwig \\ --outFileName ${prefix}.computeMatrix.mat.gz \\ @@ -39,8 +29,8 @@ process DEEPTOOLS_COMPUTEMATRIX { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(computeMatrix --version | sed -e "s/computeMatrix //g") + "${task.process}": + deeptools: \$(computeMatrix --version | sed -e "s/computeMatrix //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotfingerprint/functions.nf b/modules/deeptools/plotfingerprint/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotfingerprint/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotfingerprint/main.nf b/modules/deeptools/plotfingerprint/main.nf index b2d167f9..7925c9a9 100644 --- a/modules/deeptools/plotfingerprint/main.nf +++ b/modules/deeptools/plotfingerprint/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTFINGERPRINT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(bams), path(bais) @@ -28,11 +17,12 @@ process DEEPTOOLS_PLOTFINGERPRINT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def extend = (meta.single_end && params.fragment_size > 0) ? "--extendReads ${params.fragment_size}" : '' """ plotFingerprint \\ - $options.args \\ + $args \\ $extend \\ --bamfiles ${bams.join(' ')} \\ --plotFile ${prefix}.plotFingerprint.pdf \\ @@ -41,8 +31,8 @@ process DEEPTOOLS_PLOTFINGERPRINT { --numberOfProcessors $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") + "${task.process}": + deeptools: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotheatmap/functions.nf b/modules/deeptools/plotheatmap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotheatmap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract 
name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotheatmap/main.nf b/modules/deeptools/plotheatmap/main.nf index 19c243df..992c9058 100644 --- a/modules/deeptools/plotheatmap/main.nf +++ b/modules/deeptools/plotheatmap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTHEATMAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTHEATMAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ plotHeatmap \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotHeatmap.pdf \\ --outFileNameMatrix ${prefix}.plotHeatmap.mat.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") + "${task.process}": + deeptools: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") END_VERSIONS """ } diff --git a/modules/deeptools/plotprofile/functions.nf b/modules/deeptools/plotprofile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/deeptools/plotprofile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - 
return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/deeptools/plotprofile/main.nf b/modules/deeptools/plotprofile/main.nf index 3a196bd5..60184fa6 100644 --- a/modules/deeptools/plotprofile/main.nf +++ b/modules/deeptools/plotprofile/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DEEPTOOLS_PLOTPROFILE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::deeptools=3.5.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0" - } else { - container "quay.io/biocontainers/deeptools:3.5.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/deeptools:3.5.1--py_0' : + 'quay.io/biocontainers/deeptools:3.5.1--py_0' }" input: tuple val(meta), path(matrix) @@ -27,17 +16,18 @@ process DEEPTOOLS_PLOTPROFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ plotProfile \\ - $options.args \\ + $args \\ --matrixFile $matrix \\ --outFileName ${prefix}.plotProfile.pdf \\ --outFileNameData ${prefix}.plotProfile.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(plotProfile --version | sed -e "s/plotProfile //g") + "${task.process}": + deeptools: \$(plotProfile --version | sed -e "s/plotProfile //g") END_VERSIONS """ } diff --git a/modules/delly/call/functions.nf b/modules/delly/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/delly/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - 
def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/delly/call/main.nf b/modules/delly/call/main.nf index 59979dc9..fc04cda7 100644 --- a/modules/delly/call/main.nf +++ b/modules/delly/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DELLY_CALL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::delly=0.8.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1" - } else { - container "quay.io/biocontainers/delly:0.8.7--he03298f_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/delly:0.8.7--he03298f_1' : + 'quay.io/biocontainers/delly:0.8.7--he03298f_1' }" input: tuple val(meta), path(bam), path(bai) @@ -29,18 +18,19 @@ process DELLY_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ delly \\ call \\ - $options.args \\ + $args \\ -o ${prefix}.bcf \\ -g $fasta \\ $bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') + "${task.process}": + delly: \$( echo \$(delly --version 2>&1) | sed 's/^.*Delly version: v//; s/ using.*\$//') END_VERSIONS """ } diff --git a/modules/diamond/blastp/functions.nf b/modules/diamond/blastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 6afc66c4..c7342767 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? 
"bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTP { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/blastx/functions.nf b/modules/diamond/blastx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/blastx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index db2953da..bd7d1dd9 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_BLASTX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than this at the current time. conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container "quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: tuple val(meta), path(fasta) @@ -29,7 +18,8 @@ process DIAMOND_BLASTX { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -38,12 +28,12 @@ process DIAMOND_BLASTX { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - $options.args \\ + $args \\ --out ${prefix}.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/diamond/makedb/functions.nf b/modules/diamond/makedb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/diamond/makedb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // 
Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index e4533f8f..cccfcce9 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DIAMOND_MAKEDB { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Dimaond is limited to v2.0.9 because there is not a // singularity version higher than 
this at the current time. conda (params.enable_conda ? 'bioconda::diamond=2.0.9' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' - } else { - container 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : + 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" input: path fasta @@ -28,17 +17,18 @@ process DIAMOND_MAKEDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ diamond \\ makedb \\ --threads $task.cpus \\ --in $fasta \\ -d $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') + "${task.process}": + diamond: \$(diamond --version 2>&1 | tail -n 1 | sed 's/^diamond version //') END_VERSIONS """ } diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf new file mode 100644 index 00000000..8a6f082a --- /dev/null +++ b/modules/dragmap/align/main.nf @@ -0,0 +1,61 @@ +process DRAGMAP_ALIGN { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::dragmap=1.2.1 bioconda::samtools=1.14 conda-forge::pigz=2.3.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0': + 'quay.io/biocontainers/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0' }" + + input: + tuple val(meta), path(reads) + path hashmap + val sort_bam + + output: + tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path('*.log'), emit: log + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def samtools_command = sort_bam ? 'sort' : 'view' + if (meta.single_end) { + """ + dragen-os \\ + -r $hashmap \\ + -1 $reads \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } else { + """ + dragen-os \\ + -r $hashmap \\ + -1 ${reads[0]} \\ + -2 ${reads[1]} \\ + --num-threads $task.cpus \\ + $args \\ + 2> ${prefix}.dragmap.log \\ + | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ + } +} diff --git a/modules/dragmap/align/meta.yml b/modules/dragmap/align/meta.yml new file mode 100644 index 00000000..e943ccf8 --- /dev/null +++ b/modules/dragmap/align/meta.yml @@ -0,0 +1,42 @@ +name: dragmap_align +description: Performs fastq alignment to a reference using DRAGMAP +keywords: + - alignment + - map + - fastq + - bam + - sam 
+tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. + homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "Directory containing DRAGMAP hash table *.{cmp,.bin,.txt}" +output: + - bam: + type: file + description: Output BAM file containing read alignments + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/modules/dragmap/hashtable/main.nf b/modules/dragmap/hashtable/main.nf new file mode 100644 index 00000000..ab55364b --- /dev/null +++ b/modules/dragmap/hashtable/main.nf @@ -0,0 +1,33 @@ +process DRAGMAP_HASHTABLE { + tag "$fasta" + label 'process_high' + + conda (params.enable_conda ? "bioconda::dragmap=1.2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/dragmap:1.2.1--hd4ca14e_0': + 'quay.io/biocontainers/dragmap:1.2.1--hd4ca14e_0' }" + + input: + path fasta + + output: + path "dragmap" , emit: hashmap + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + mkdir dragmap + dragen-os \\ + --build-hash-table true \\ + --ht-reference $fasta \\ + --output-directory dragmap \\ + $args \\ + --ht-num-threads $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + END_VERSIONS + """ +} diff --git a/modules/dragmap/hashtable/meta.yml b/modules/dragmap/hashtable/meta.yml new file mode 100644 index 00000000..86e58789 --- /dev/null +++ b/modules/dragmap/hashtable/meta.yml @@ -0,0 +1,30 @@ +name: dragmap_hashtable +description: Create DRAGEN hashtable for reference genome +keywords: + - index + - fasta + - genome + - reference +tools: + - dragmap: + description: Dragmap is the Dragen mapper/aligner Open Source Software. 
+ homepage: https://github.com/Illumina/dragmap + documentation: https://github.com/Illumina/dragmap + tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage + doi: "" + licence: ['GPL v3'] +input: + - fasta: + type: file + description: Input genome fasta file +output: + - hashmap: + type: file + description: DRAGMAP hash table + pattern: "*.{cmp,.bin,.txt}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/modules/dragonflye/functions.nf b/modules/dragonflye/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dragonflye/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index f9dc9004..8ca98832 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DRAGONFLYE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::dragonflye=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0" - } else { - container "quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0' : + 'quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,18 +19,19 @@ process DRAGONFLYE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ dragonflye \\ --reads ${reads} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) + "${task.process}": + dragonflye: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) END_VERSIONS """ } diff --git a/modules/dshbio/exportsegments/functions.nf b/modules/dshbio/exportsegments/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/exportsegments/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 84f59e89..d506a4b6 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_EXPORTSEGMENTS { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gfa) @@ -26,17 +15,18 @@ process DSHBIO_EXPORTSEGMENTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ export-segments \\ - $options.args \\ + $args \\ -i $gfa \\ -o ${prefix}.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filterbed/functions.nf b/modules/dshbio/filterbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filterbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 35039f21..6480f4a4 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,17 +15,18 @@ process DSHBIO_FILTERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-bed \\ - $options.args \\ + $args \\ -i $bed \\ -o ${prefix}.bed.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/filtergff3/functions.nf b/modules/dshbio/filtergff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/filtergff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index bf677da8..a0bbf3af 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_FILTERGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,17 +15,18 @@ process DSHBIO_FILTERGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ filter-gff3 \\ - $options.args \\ + $args \\ -i $gff3 \\ -o ${prefix}.gff3.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitbed/functions.nf b/modules/dshbio/splitbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 3e8d656c..8dbf1104 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITBED { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::dsh-bio=2.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -26,18 +15,19 @@ process DSHBIO_SPLITBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-bed \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.bed.gz' \\ -i $bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/dshbio/splitgff3/functions.nf b/modules/dshbio/splitgff3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/dshbio/splitgff3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index dd477181..fc868a39 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process DSHBIO_SPLITGFF3 { tag "${meta.id}" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/dsh-bio:2.0.5--hdfd78af_0" - } else { - container "quay.io/biocontainers/dsh-bio:2.0.5--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.6--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.6--hdfd78af_0' }" input: tuple val(meta), path(gff3) @@ -26,18 +15,19 @@ process DSHBIO_SPLITGFF3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ dsh-bio \\ split-gff3 \\ - $options.args \\ + $args \\ -p $prefix \\ -s '.gff3.gz' \\ -i $gff3 cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') + "${task.process}": + dshbio: \$(dsh-bio --version 2>&1 | grep -o 'dsh-bio-tools .*' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/ectyper/main.nf b/modules/ectyper/main.nf new file mode 100644 index 00000000..0e040958 --- /dev/null +++ b/modules/ectyper/main.nf @@ -0,0 +1,42 @@ +process ECTYPER { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::ectyper=1.0.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ectyper:1.0.0--pyhdfd78af_1' : + 'quay.io/biocontainers/ectyper:1.0.0--pyhdfd78af_1' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.log"), emit: log + tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.txt"), emit: txt + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def is_compressed = fasta.getName().endsWith(".gz") ? 
true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + ectyper \\ + $args \\ + --cores $task.cpus \\ + --output ./ \\ + --input $fasta_name + + mv output.tsv ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ectyper: \$(echo \$(ectyper --version 2>&1) | sed 's/.*ectyper //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/ectyper/meta.yml b/modules/ectyper/meta.yml new file mode 100644 index 00000000..a6beca29 --- /dev/null +++ b/modules/ectyper/meta.yml @@ -0,0 +1,51 @@ +name: ectyper +description: In silico prediction of E. coli serotype +keywords: + - escherichia coli + - fasta + - serotype +tools: + - ectyper: + description: ECtyper is a python program for serotyping E. coli genomes + homepage: https://github.com/phac-nml/ecoli_serotyping + documentation: https://github.com/phac-nml/ecoli_serotyping + tool_dev_url: https://github.com/phac-nml/ecoli_serotyping + doi: "" + licence: ['Apache 2'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA formatted assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: ectyper log output + pattern: "*.log" + - tsv: + type: file + description: ectyper serotyping results in TSV format + pattern: "*.tsv" + - txt: + type: file + description: Allele report generated from BLAST results + pattern: "*.tst" + +authors: + - "@rpetit3" diff --git a/modules/emmtyper/main.nf b/modules/emmtyper/main.nf new file mode 100644 index 00000000..70dabfb7 --- /dev/null +++ b/modules/emmtyper/main.nf @@ -0,0 +1,31 @@ +process EMMTYPER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::emmtyper=0.2.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/emmtyper:0.2.0--py_0' : + 'quay.io/biocontainers/emmtyper:0.2.0--py_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + emmtyper \\ + $args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + emmtyper: \$( echo \$(emmtyper --version 2>&1) | sed 's/^.*emmtyper v//' ) + END_VERSIONS + """ +} diff --git a/modules/emmtyper/meta.yml b/modules/emmtyper/meta.yml new file mode 100644 index 00000000..019a8e4c --- /dev/null +++ b/modules/emmtyper/meta.yml @@ -0,0 +1,43 @@ +name: emmtyper +description: EMM typing of Streptococcus pyogenes assemblies +keywords: + - fasta + - Streptococcus pyogenes + - typing +tools: + - emmtyper: + description: Streptococcus pyogenes in silico EMM typer + homepage: https://github.com/MDU-PHL/emmtyper + documentation: https://github.com/MDU-PHL/emmtyper + tool_dev_url: https://github.com/MDU-PHL/emmtyper + doi: "" + licence: ['GNU General Public 
v3 (GPL v3)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/modules/ensemblvep/functions.nf b/modules/ensemblvep/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ensemblvep/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes 
- return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ensemblvep/main.nf b/modules/ensemblvep/main.nf index ad9c38a6..e3d0c286 100644 --- a/modules/ensemblvep/main.nf +++ b/modules/ensemblvep/main.nf @@ -1,27 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.vep_tag = "" - process ENSEMBLVEP { + tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ensembl-vep=104.3" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0" - } else { - container "quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0" - } - } else { - container "nfcore/vep:${params.vep_tag}" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ensembl-vep:104.3--pl5262h4a94de4_0' : + 'quay.io/biocontainers/ensembl-vep:104.3--pl5262h4a94de4_0' }" input: tuple val(meta), path(vcf) @@ -36,29 +20,30 @@ process ENSEMBLVEP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - dir_cache = params.use_cache ? "\${PWD}/${cache}" : "/.vep" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def dir_cache = cache ? 
"\${PWD}/${cache}" : "/.vep" """ mkdir $prefix vep \\ -i $vcf \\ -o ${prefix}.ann.vcf \\ - $options.args \\ + $args \\ --assembly $genome \\ --species $species \\ --cache \\ --cache_version $cache_version \\ --dir_cache $dir_cache \\ --fork $task.cpus \\ - --format vcf \\ + --vcf \\ --stats_file ${prefix}.summary.html rm -rf $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') + "${task.process}": + ensemblvep: \$( echo \$(vep --help 2>&1) | sed 's/^.*Versions:.*ensembl-vep : //;s/ .*\$//') END_VERSIONS """ } diff --git a/modules/expansionhunter/functions.nf b/modules/expansionhunter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/expansionhunter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 845de15d..4db78230 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process EXPANSIONHUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::expansionhunter=4.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0" - } else { - container "quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/expansionhunter:4.0.2--he785bd8_0' : + 'quay.io/biocontainers/expansionhunter:4.0.2--he785bd8_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,11 +17,12 @@ process EXPANSIONHUNTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def gender = (meta.gender == 'male' || meta.gender == 1 || meta.gender == 'XY') ? "male" : "female" """ ExpansionHunter \\ - $options.args \\ + $args \\ --reads $bam \\ --output-prefix $prefix \\ --reference $fasta \\ @@ -40,8 +30,8 @@ process EXPANSIONHUNTER { --sex $gender cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') + "${task.process}": + expansionhunter: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') END_VERSIONS """ } diff --git a/modules/fargene/main.nf b/modules/fargene/main.nf new file mode 100644 index 00000000..73bdd411 --- /dev/null +++ b/modules/fargene/main.nf @@ -0,0 +1,50 @@ +def VERSION = '0.1' // Version information not provided by tool on CLI + +process FARGENE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::fargene=0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/fargene:0.1--py27h21c881e_4' : + 'quay.io/biocontainers/fargene:0.1--py27h21c881e_4' }" + + input: + // input may be fasta (for genomes or longer contigs) or paired-end fastq (for metagenome), the latter in addition with --meta flag + tuple val(meta), path(input) + val hmm_model + + output: + path "*.log" , emit: log + path "${prefix}/results_summary.txt" , emit: txt + tuple val(meta), path("${prefix}/hmmsearchresults/*.out") , optional: true, emit: hmm + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs.fasta") , optional: true, emit: orfs + tuple val(meta), path("${prefix}/predictedGenes/predicted-orfs-amino.fasta") , optional: true, emit: orfs_amino + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs.fasta") , optional: true, emit: contigs + tuple val(meta), path("${prefix}/predictedGenes/retrieved-contigs-peptides.fasta") , optional: true, emit: contigs_pept + tuple val(meta), path("${prefix}/predictedGenes/*filtered.fasta") , optional: true, emit: filtered + tuple val(meta), path("${prefix}/predictedGenes/*filtered-peptides.fasta") , optional: true, emit: filtered_pept + tuple val(meta), path("${prefix}/retrievedFragments/all_retrieved_*.fastq") , optional: true, emit: fragments + tuple val(meta), path("${prefix}/retrievedFragments/retrievedFragments/trimmedReads/*.fasta"), optional: true, emit: trimmed + tuple val(meta), path("${prefix}/spades_assembly/*") , optional: true, emit: spades + tuple val(meta), path("${prefix}/tmpdir/*.fasta") , optional: true, emit: metagenome + tuple val(meta), path("${prefix}/tmpdir/*.out") , optional: true, emit: tmp + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" + """ + fargene \\ + $args \\ + -p $task.cpus \\ + -i $input \\ + --hmm-model $hmm_model \\ + -o $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fargene: $VERSION + END_VERSIONS + """ +} diff 
--git a/modules/fargene/meta.yml b/modules/fargene/meta.yml new file mode 100644 index 00000000..35e98008 --- /dev/null +++ b/modules/fargene/meta.yml @@ -0,0 +1,101 @@ +name: fargene +description: tool that takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antiobiotic resistance genes as output. +keywords: + - antibiotic resistance genes + - ARGs + - identifier + - metagenomic + - contigs +tools: + - fargene: + description: Fragmented Antibiotic Resistance Gene Identifier takes either fragmented metagenomic data or longer sequences as input and predicts and delivers full-length antiobiotic resistance genes as output + homepage: https://github.com/fannyhb/fargene + documentation: https://github.com/fannyhb/fargene + tool_dev_url: https://github.com/fannyhb/fargene + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: uncompressed fasta file or paired-end fastq files containing either genomes or longer contigs as nucleotide or protein sequences (fasta) or fragmented metagenomic reads (fastq) + pattern: "*.{fasta}" + - hmm_model: + type: string + description: name of custom hidden markov model to be used [pre-defined class_a, class_b_1_2, class_b_3, class_c, class_d_1, class_d_2, qnr, tet_efflux, tet_rpg, tet_enzyme] + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - log: + type: file + description: log file + pattern: "*.{log}" + - txt: + type: file + description: analysis summary text file + pattern: "*.{txt}" + - hmm: + type: file + description: output from hmmsearch + pattern: "*.{out}" + - orfs: + type: file + description: open reading frames (ORFs) + pattern: "*.{fasta}" + - orfs_amino: + type: file + description: protein translation of open reading frames (ORFs) + pattern: "*.{fasta}" + - contigs: + type: file + description: (complete) contigs that passed the final full-length classification + pattern: "*.{fasta}" + - contigs_pept: + type: file + description: parts of the contigs that passed the final classification step that aligned with the HMM, as amino acid sequences + pattern: "*.{fasta}" + - filtered: + type: file + description: sequences that passed the final classification step, but only the parts that where predicted by the HMM to be part of the gene + pattern: "*.{fasta}" + - filtered_pept: + type: file + description: sequences from filtered.fasta, translated in the same frame as the gene is predicted to be located + pattern: "*.{fasta}" + - fragments: + type: file + description: All quality controlled retrieved fragments that were classified as positive, together with its read-pair, gathered in two files + pattern: "*.{fastq}" + - trimmed: + type: file + description: The quality controlled retrieved fragments from each input file. + pattern: "*.{fasta}" + - spades: + type: directory + description: The output from the SPAdes assembly + pattern: "spades_assembly" + - metagenome: + type: file + description: The FASTQ to FASTA converted input files from metagenomic reads. + pattern: "*.{fasta}" + - tmp: + type: file + description: The from FASTQ to FASTA converted input files and their translated input sequences. Are only saved if option --store-peptides is used. 
+ pattern: "*.{fasta}" + + +authors: + - "@louperelo" diff --git a/modules/fastani/functions.nf b/modules/fastani/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastani/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastani/main.nf b/modules/fastani/main.nf index 5c6366f9..cc1c4902 100644 --- a/modules/fastani/main.nf +++ b/modules/fastani/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTANI { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastani=1.32" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0" - } else { - container "quay.io/biocontainers/fastani:1.32--he1c1bb9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/fastani:1.32--he1c1bb9_0' : + 'quay.io/biocontainers/fastani:1.32--he1c1bb9_0' }" input: tuple val(meta), path(query) @@ -27,7 +16,8 @@ process FASTANI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.batch_input) { """ @@ -37,8 +27,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } else { @@ -49,8 +39,8 @@ process FASTANI { -o ${prefix}.ani.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastANI --version 2>&1 | sed 's/version//;') + "${task.process}": + fastani: \$(fastANI --version 2>&1 | sed 's/version//;') END_VERSIONS """ } diff --git a/modules/fastp/functions.nf b/modules/fastp/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastp/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - 
options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastp/main.nf b/modules/fastp/main.nf index e99540d5..33603842 100644 --- a/modules/fastp/main.nf +++ b/modules/fastp/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::fastp=0.20.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' - } else { - container 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastp:0.20.1--h8b12597_0' : + 'quay.io/biocontainers/fastp:0.20.1--h8b12597_0' }" input: tuple val(meta), path(reads) @@ -33,8 +22,9 @@ process FASTP { tuple val(meta), path('*.merged.fastq.gz'), optional:true, emit: reads_merged script: + def args = task.ext.args ?: '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { def fail_fastq = save_trimmed_fail ? "--failed_out ${prefix}.fail.fastq.gz" : '' """ @@ -46,11 +36,11 @@ process FASTP { --json ${prefix}.fastp.json \\ --html ${prefix}.fastp.html \\ $fail_fastq \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } else { @@ -70,12 +60,12 @@ process FASTP { $merge_fastq \\ --thread $task.cpus \\ --detect_adapter_for_pe \\ - $options.args \\ + $args \\ 2> ${prefix}.fastp.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fastp --version 2>&1 | sed -e "s/fastp //g") + "${task.process}": + fastp: \$(fastp --version 2>&1 | sed -e "s/fastp //g") END_VERSIONS """ } diff --git a/modules/fastp/meta.yml b/modules/fastp/meta.yml index 6e133871..a1875faf 100644 --- a/modules/fastp/meta.yml +++ b/modules/fastp/meta.yml @@ -40,7 +40,7 @@ output: - html: type: file description: Results in HTML format - pattern: "*.thml" + pattern: "*.html" - log: type: file description: fastq log file diff --git a/modules/fastqc/functions.nf b/modules/fastqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fastqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index 9f6cfc55..d250eca0 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fastqc=0.11.9" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0" - } else { - container "quay.io/biocontainers/fastqc:0.11.9--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : + 'quay.io/biocontainers/fastqc:0.11.9--0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process FASTQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' // Add soft-links to original FastQs for consistent naming in pipeline - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! 
-f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}.fastq.gz + fastqc $args --threads $task.cpus ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } else { """ [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! -f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz - fastqc $options.args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz + fastqc $args --threads $task.cpus ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( fastqc --version | sed -e "s/FastQC v//g" ) + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) END_VERSIONS """ } diff --git a/modules/fastqscan/main.nf b/modules/fastqscan/main.nf new file mode 100644 index 00000000..a0dcc46a --- /dev/null +++ b/modules/fastqscan/main.nf @@ -0,0 +1,30 @@ +process FASTQSCAN { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::fastq-scan=0.4.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/fastq-scan:0.4.4--h7d875b9_0' : + 'quay.io/biocontainers/fastq-scan:0.4.4--h7d875b9_0' }" + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + zcat $reads | \\ + fastq-scan \\ + $args > ${prefix}.json + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqscan: \$( echo \$(fastq-scan -v 2>&1) | sed 's/^.*fastq-scan //' ) + END_VERSIONS + """ +} diff --git a/modules/fastqscan/meta.yml b/modules/fastqscan/meta.yml new file mode 100644 index 00000000..99538b5a --- /dev/null +++ b/modules/fastqscan/meta.yml @@ -0,0 +1,43 @@ +name: fastqscan +description: FASTQ summary statistics in JSON format +keywords: + - fastq + - summary + - statistics +tools: + - fastqscan: + description: FASTQ summary statistics in JSON format + homepage: https://github.com/rpetit3/fastq-scan + documentation: https://github.com/rpetit3/fastq-scan + tool_dev_url: https://github.com/rpetit3/fastq-scan + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: JSON formatted file of summary statistics + pattern: "*.json" + +authors: + - "@rpetit3" diff --git a/modules/fasttree/functions.nf b/modules/fasttree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fasttree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fasttree/main.nf b/modules/fasttree/main.nf index 5f81d1f2..5e57aae9 100644 --- a/modules/fasttree/main.nf +++ b/modules/fasttree/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FASTTREE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::fasttree=2.1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4" - } else { - container "quay.io/biocontainers/fasttree:2.1.10--h516909a_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/fasttree:2.1.10--h516909a_4' : + 'quay.io/biocontainers/fasttree:2.1.10--h516909a_4' }" input: path alignment @@ -25,16 +14,17 @@ process FASTTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ fasttree \\ - $options.args \\ + $args \\ -log fasttree_phylogeny.tre.log \\ -nt $alignment \\ > fasttree_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') + "${task.process}": + fasttree: \$(fasttree -help 2>&1 | head -1 | sed 's/^FastTree \\([0-9\\.]*\\) .*\$/\\1/') END_VERSIONS """ } diff --git a/modules/fgbio/callmolecularconsensusreads/functions.nf b/modules/fgbio/callmolecularconsensusreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/callmolecularconsensusreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/callmolecularconsensusreads/main.nf b/modules/fgbio/callmolecularconsensusreads/main.nf index 23056b90..3aab935b 100644 --- a/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/modules/fgbio/callmolecularconsensusreads/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_CALLMOLECULARCONSENSUSREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> 
saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,17 +15,18 @@ process FGBIO_CALLMOLECULARCONSENSUSREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ CallMolecularConsensusReads \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/fastqtobam/functions.nf b/modules/fgbio/fastqtobam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/fastqtobam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/fastqtobam/main.nf b/modules/fgbio/fastqtobam/main.nf index 68a85508..126c3dd8 100644 --- a/modules/fgbio/fastqtobam/main.nf +++ b/modules/fgbio/fastqtobam/main.nf @@ -1,51 +1,39 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_FASTQTOBAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(reads) - val(read_structure) + val read_structure output: tuple val(meta), path("*_umi_converted.bam"), emit: umibam path "versions.yml" , emit: version script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - mkdir tmpFolder + mkdir tmp fgbio \\ - --tmp-dir=${PWD}/tmpFolder \\ + --tmp-dir=${PWD}/tmp \\ FastqToBam \\ -i $reads \\ -o "${prefix}_umi_converted.bam" \\ --read-structures $read_structure \\ --sample $meta.id \\ --library $meta.id \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/groupreadsbyumi/functions.nf b/modules/fgbio/groupreadsbyumi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/groupreadsbyumi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/groupreadsbyumi/main.nf b/modules/fgbio/groupreadsbyumi/main.nf index 8e16f0a5..47f000a5 100644 --- a/modules/fgbio/groupreadsbyumi/main.nf +++ b/modules/fgbio/groupreadsbyumi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_GROUPREADSBYUMI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, 
options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0' : + 'quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0' }" input: tuple val(meta), path(taggedbam) @@ -28,7 +17,8 @@ process FGBIO_GROUPREADSBYUMI { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir tmp @@ -37,14 +27,14 @@ process FGBIO_GROUPREADSBYUMI { --tmp-dir=${PWD}/tmp \\ GroupReadsByUmi \\ -s $strategy \\ - ${options.args} \\ + $args \\ -i $taggedbam \\ -o ${prefix}_umi-grouped.bam \\ -f ${prefix}_umi_histogram.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/fgbio/sortbam/functions.nf b/modules/fgbio/sortbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/fgbio/sortbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// 
-// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/fgbio/sortbam/main.nf b/modules/fgbio/sortbam/main.nf index 34e0b377..c542f3df 100644 --- a/modules/fgbio/sortbam/main.nf +++ b/modules/fgbio/sortbam/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FGBIO_SORTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::fgbio=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0" - } else { - container "quay.io/biocontainers/fgbio:1.3.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/fgbio:1.3.0--0' : + 'quay.io/biocontainers/fgbio:1.3.0--0' }" input: tuple val(meta), path(bam) @@ -25,16 +15,17 @@ process FGBIO_SORTBAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ fgbio \\ SortBam \\ -i $bam \\ - $options.args \\ + $args \\ -o ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') + "${task.process}": + fgbio: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//') END_VERSIONS """ } diff --git a/modules/filtlong/functions.nf b/modules/filtlong/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/filtlong/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/filtlong/main.nf b/modules/filtlong/main.nf index 6e82f112..bb1c1eb3 100644 --- a/modules/filtlong/main.nf +++ b/modules/filtlong/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FILTLONG { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::filtlong=0.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0" - } else { - container "quay.io/biocontainers/filtlong:0.2.1--h9a82719_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0' : + 'quay.io/biocontainers/filtlong:0.2.1--h9a82719_0' }" input: tuple val(meta), path(shortreads), path(longreads) @@ -26,18 +15,19 @@ process FILTLONG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" """ filtlong \\ $short_reads \\ - $options.args \\ + $args \\ $longreads \\ | gzip -n > ${prefix}_lr_filtlong.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( filtlong --version | sed -e "s/Filtlong v//g" ) + "${task.process}": + filtlong: \$( filtlong --version | sed -e "s/Filtlong v//g" ) END_VERSIONS """ } diff --git a/modules/flash/functions.nf b/modules/flash/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/flash/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/flash/main.nf b/modules/flash/main.nf index 912b2961..7bc38c97 100644 --- a/modules/flash/main.nf +++ b/modules/flash/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FLASH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::flash=1.2.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5" - } else { - container "quay.io/biocontainers/flash:1.2.11--hed695b0_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/flash:1.2.11--hed695b0_5' : + 'quay.io/biocontainers/flash:1.2.11--hed695b0_5' }" input: tuple val(meta), path(reads) @@ -25,18 +14,19 @@ process FLASH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ flash \\ - $options.args \\ + $args \\ -o ${prefix} \\ -z \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') + "${task.process}": + flash: \$(echo \$(flash --version 2>&1) | sed 's/^.*FLASH v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/freebayes/functions.nf b/modules/freebayes/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/freebayes/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/freebayes/main.nf b/modules/freebayes/main.nf index 0b23dc40..1dd91fef 100644 --- a/modules/freebayes/main.nf +++ b/modules/freebayes/main.nf @@ -1,38 +1,28 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process FREEBAYES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::freebayes=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3" - } else { - container "quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/freebayes:1.3.5--py38ha193a2f_3' : + 'quay.io/biocontainers/freebayes:1.3.5--py38ha193a2f_3' }" input: tuple val(meta), path(input_1), path(input_1_index), path(input_2), path(input_2_index) path fasta - path fai + path fasta_fai path targets path samples path populations path cnv output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - path "versions.yml" , emit: versions + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def input = input_2 ? "${input_1} ${input_2}" : "${input_1}" def targets_file = targets ? "--target ${targets}" : "" def samples_file = samples ? 
"--samples ${samples}" : "" @@ -42,20 +32,20 @@ process FREEBAYES { if (task.cpus > 1) { """ freebayes-parallel \\ - <(fasta_generate_regions.py ${fasta}.fai 10000) ${task.cpus} \\ + <(fasta_generate_regions.py $fasta_fai 10000) $task.cpus \\ -f $fasta \\ $targets_file \\ $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) END_VERSIONS """ @@ -67,14 +57,14 @@ process FREEBAYES { $samples_file \\ $populations_file \\ $cnv_file \\ - $options.args \\ + $args \\ $input > ${prefix}.vcf gzip --no-name ${prefix}.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) + "${task.process}": + freebayes: \$(echo \$(freebayes --version 2>&1) | sed 's/version:\s*v//g' ) END_VERSIONS """ } diff --git a/modules/freebayes/meta.yml b/modules/freebayes/meta.yml index 75d44826..abba1daa 100644 --- a/modules/freebayes/meta.yml +++ b/modules/freebayes/meta.yml @@ -36,10 +36,10 @@ input: type: file description: reference fasta file pattern: ".{fa,fa.gz,fasta,fasta.gz}" - - fai: + - fasta_fai: type: file description: reference fasta file index - pattern: "*.fai" + pattern: "*.{fa,fasta}.fai" - targets: type: file description: Optional - Limit analysis to targets listed in this BED-format FILE. 
@@ -55,8 +55,7 @@ input: - cnv: type: file description: | - A copy number map BED file, which has - either a sample-level ploidy: + A copy number map BED file, which has either a sample-level ploidy: sample_name copy_number or a region-specific format: seq_name start end sample_name copy_number diff --git a/modules/gatk4/applybqsr/functions.nf b/modules/gatk4/applybqsr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/applybqsr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - 
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 508a29ca..3cc69ddf 100644 --- a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_APPLYBQSR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index), path(bqsr_table) path fasta - path fastaidx + path fai path dict path intervals @@ -30,26 +19,28 @@ process GATK4_APPLYBQSR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def interval = intervals ? "-L ${intervals}" : "" + def avail_mem = 3 if (!task.memory) { log.info '[GATK ApplyBQSR] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk ApplyBQSR \\ + gatk --java-options "-Xmx${avail_mem}g" ApplyBQSR \\ -R $fasta \\ -I $input \\ --bqsr-recal-file $bqsr_table \\ $interval \\ --tmp-dir . 
\\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index e7419860..4e3b2f9a 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -34,12 +34,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/baserecalibrator/functions.nf b/modules/gatk4/baserecalibrator/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/baserecalibrator/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - 
options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index 85c30daf..17b37943 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BASERECALIBRATOR { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) path fasta - path fastaidx + path fai path dict path intervalsBed path knownSites @@ -32,28 +21,29 @@ process GATK4_BASERECALIBRATOR { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def intervalsCommand = intervalsBed ? "-L ${intervalsBed}" : "" def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') - + def avail_mem = 3 if (!task.memory) { log.info '[GATK BaseRecalibrator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk BaseRecalibrator \ + gatk --java-options "-Xmx${avail_mem}g" BaseRecalibrator \ -R $fasta \ -I $input \ $sitesCommand \ $intervalsCommand \ --tmp-dir . \ - $options.args \ + $args \ -O ${prefix}.table cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 7fd273e1..188340b4 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -31,12 +31,15 @@ input: - fasta: type: file description: The reference fasta file - - fastaidx: + pattern: "*.fasta" + - fai: type: file description: Index of reference fasta file + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary + pattern: "*.dict" - intervalsBed: type: file description: Bed file with the genomic regions included in the library (optional) diff --git a/modules/gatk4/bedtointervallist/functions.nf b/modules/gatk4/bedtointervallist/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/bedtointervallist/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def 
getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 064247cc..2f6266b9 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_BEDTOINTERVALLIST { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -27,17 +16,24 @@ process GATK4_BEDTOINTERVALLIST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK BedToIntervalList] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk BedToIntervalList \\ + gatk --java-options "-Xmx${avail_mem}g" BedToIntervalList \\ -I $bed \\ -SD $sequence_dict \\ -O ${prefix}.interval_list \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/calculatecontamination/functions.nf b/modules/gatk4/calculatecontamination/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/calculatecontamination/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join 
elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index bfe9b8fd..8840356a 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CALCULATECONTAMINATION { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(pileup), path(matched) @@ -28,20 +17,27 @@ process GATK4_CALCULATECONTAMINATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def matched_command = matched ? " -matched ${matched} " : '' def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CalculateContamination] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk CalculateContamination \\ + gatk --java-options "-Xmx${avail_mem}g" CalculateContamination \\ -I $pileup \\ $matched_command \\ -O ${prefix}.contamination.table \\ $segment_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsequencedictionary/functions.nf b/modules/gatk4/createsequencedictionary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsequencedictionary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 12372bdf..e8f32106 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESEQUENCEDICTIONARY { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: path fasta @@ -26,9 +15,10 @@ process GATK4_CREATESEQUENCEDICTIONARY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { - log.info '[GATK] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' + log.info '[GATK CreateSequenceDictionary] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } @@ -37,11 +27,11 @@ process GATK4_CREATESEQUENCEDICTIONARY { CreateSequenceDictionary \\ --REFERENCE $fasta \\ --URI $fasta \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsomaticpanelofnormals/functions.nf b/modules/gatk4/createsomaticpanelofnormals/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/createsomaticpanelofnormals/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - 
options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index 66dfda23..ff345f75 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_CREATESOMATICPANELOFNORMALS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(genomicsdb) path fasta - path fastaidx + path fai path dict output: @@ -30,18 +19,25 @@ process GATK4_CREATESOMATICPANELOFNORMALS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK CreateSomaticPanelOfNormals] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ CreateSomaticPanelOfNormals \\ -R $fasta \\ -V gendb://$genomicsdb \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/createsomaticpanelofnormals/meta.yml b/modules/gatk4/createsomaticpanelofnormals/meta.yml index f0199ed6..e450c68a 100644 --- a/modules/gatk4/createsomaticpanelofnormals/meta.yml +++ b/modules/gatk4/createsomaticpanelofnormals/meta.yml @@ -28,10 +28,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/estimatelibrarycomplexity/functions.nf b/modules/gatk4/estimatelibrarycomplexity/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/estimatelibrarycomplexity/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ 
-// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index 4cea7086..c17dba09 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_ESTIMATELIBRARYCOMPLEXITY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.2.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/gatk4:4.2.2.0--hdfd78af_1" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(cram) @@ -29,7 +18,8 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") def avail_mem = 3 @@ -39,16 +29,16 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { avail_mem = task.memory.giga } """ - gatk EstimateLibraryComplexity \ + gatk --java-options "-Xmx${avail_mem}g" EstimateLibraryComplexity \ ${crams} \ -O ${prefix}.metrics \ --REFERENCE_SEQUENCE ${fasta} \ --VALIDATION_STRINGENCY SILENT \ - --TMP_DIR . $options.args + --TMP_DIR . $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/fastqtosam/functions.nf b/modules/gatk4/fastqtosam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/fastqtosam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements 
of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index ebd081ac..a55ba709 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FASTQTOSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -26,18 +15,25 @@ process GATK4_FASTQTOSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FastqToSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk FastqToSam \\ + gatk --java-options "-Xmx${avail_mem}g" FastqToSam \\ $read_files \\ -O ${prefix}.bam \\ -SM $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/filtermutectcalls/functions.nf b/modules/gatk4/filtermutectcalls/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/filtermutectcalls/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return 
options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 5a784677..6a1d9b3a 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_FILTERMUTECTCALLS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) path fasta - path fastaidx + path fai path dict output: @@ -31,7 +20,8 @@ process GATK4_FILTERMUTECTCALLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def orientationbias_options = '' if (orientationbias) { @@ -47,19 +37,25 @@ process GATK4_FILTERMUTECTCALLS { if (contaminationfile) { contamination_options = '--contamination-table ' + contaminationfile.join(' --contamination-table ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK FilterMutectCalls] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk FilterMutectCalls \\ + gatk --java-options "-Xmx${avail_mem}g" FilterMutectCalls \\ -R $fasta \\ -V $vcf \\ $orientationbias_options \\ $segmentation_options \\ $contamination_options \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/filtermutectcalls/meta.yml b/modules/gatk4/filtermutectcalls/meta.yml index f14f9404..7d85e2b9 100644 --- a/modules/gatk4/filtermutectcalls/meta.yml +++ b/modules/gatk4/filtermutectcalls/meta.yml @@ -53,10 +53,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary diff --git a/modules/gatk4/gatherbqsrreports/main.nf b/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..1567f9aa --- /dev/null +++ b/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,41 @@ +process GATK4_GATHERBQSRREPORTS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+        'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_1':
+        'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_1' }"
+
+    input:
+    tuple val(meta), path(recal_table)
+
+    output:
+    tuple val(meta), path("*.table"), emit: table
+    path "versions.yml"             , emit: versions
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def input = recal_table.collect{"-I ${it}"}.join(' ')
+
+    def avail_mem = 3
+    if (!task.memory) {
+        log.info '[GATK GatherBQSRReports] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
+    } else {
+        avail_mem = task.memory.giga
+    }
+    """
+    gatk --java-options "-Xmx${avail_mem}g" \\
+        GatherBQSRReports \
+        ${input} \
+        --tmp-dir . \
+        $args \
+        --output ${prefix}.table
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
+    END_VERSIONS
+    """
+}
diff --git a/modules/gatk4/gatherbqsrreports/meta.yml b/modules/gatk4/gatherbqsrreports/meta.yml
new file mode 100644
index 00000000..f71afd69
--- /dev/null
+++ b/modules/gatk4/gatherbqsrreports/meta.yml
@@ -0,0 +1,43 @@
+name: gatk4_gatherbqsrreports
+description: Gathers scattered BQSR recalibration reports into a single file
+keywords:
+  - gatk4
+  - gatk4_gatherbqsrreports
+  - base_recalibration
+tools:
+  - gatk4:
+      description: Genome Analysis Toolkit (GATK4)
+      homepage: https://gatk.broadinstitute.org/hc/en-us
+      documentation: https://gatk.broadinstitute.org/hc/en-us
+      tool_dev_url: https://github.com/broadinstitute/gatk
+      doi: "10.1158/1538-7445.AM2017-3590"
+      licence: ['BSD-3-clause']
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - recal_table:
+      type: file
+      description: File(s) containing BQSR table(s)
+      pattern: "*.table"
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - recal_table: + type: file + description: File containing joined BQSR table + pattern: "*.table" + +authors: + - "@FriederikeHanssen" diff --git a/modules/gatk4/genomicsdbimport/functions.nf b/modules/gatk4/genomicsdbimport/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/genomicsdbimport/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index aa4fceb0..2751173b 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GENOMICSDBIMPORT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcf), path(tbi), path(intervalfile), val(intervalval), path(wspace) @@ -25,18 +14,19 @@ process GATK4_GENOMICSDBIMPORT { val input_map output: - tuple val(meta), path("*_genomicsdb") , optional:true, emit: genomicsdb + tuple val(meta), path("${prefix}") , optional:true, emit: genomicsdb tuple val(meta), path("$updated_db") , optional:true, emit: updatedb tuple val(meta), path("*.interval_list"), optional:true, emit: intervallist path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" // settings for running default create gendb mode - def inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V')}" - def dir_command = "--genomicsdb-workspace-path ${prefix}" - def intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " + inputs_command = input_map ? "--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" + dir_command = "--genomicsdb-workspace-path ${prefix}" + intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " // settings changed for running get intervals list mode if run_intlist is true if (run_intlist) { @@ -52,16 +42,22 @@ process GATK4_GENOMICSDBIMPORT { updated_db = wspace.toString() } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenomicsDBImport] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk GenomicsDBImport \\ + gatk --java-options "-Xmx${avail_mem}g" GenomicsDBImport \\ $inputs_command \\ $dir_command \\ $intervals_command \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/genomicsdbimport/meta.yml b/modules/gatk4/genomicsdbimport/meta.yml index f7a32e7e..af626cb1 100644 --- a/modules/gatk4/genomicsdbimport/meta.yml +++ b/modules/gatk4/genomicsdbimport/meta.yml @@ -66,7 +66,11 @@ output: - genomicsdb: type: directory description: Directory containing the files that compose the genomicsdb workspace, this is only output for create mode, as update changes an existing db - pattern: "*_genomicsdb" + pattern: "*/$prefix" + - updatedb: + type: directory + description: Directory containing the files that compose the updated genomicsdb workspace, this is only output for update mode, and should be the same path as the input wspace. + pattern: "same/path/as/wspace" - intervallist: type: file description: File containing the intervals used to generate the genomicsdb, only created by get intervals mode. diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf new file mode 100644 index 00000000..1a772860 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -0,0 +1,50 @@ +process GATK4_GENOTYPEGVCFS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" + + input: + tuple val(meta), path(gvcf), path(gvcf_index) + path fasta + path fasta_index + path fasta_dict + path dbsnp + path dbsnp_index + path intervals_bed + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" + def interval_options = intervals_bed ? "-L ${intervals_bed}" : "" + def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GenotypeGVCFs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" \\ + GenotypeGVCFs \\ + $args \\ + $interval_options \\ + $dbsnp_options \\ + -R $fasta \\ + -V $gvcf_options \\ + -O ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/genotypegvcfs/meta.yml b/modules/gatk4/genotypegvcfs/meta.yml new file mode 100644 index 00000000..cd7457a7 --- /dev/null +++ b/modules/gatk4/genotypegvcfs/meta.yml @@ -0,0 +1,69 @@ +name: gatk4_genotypegvcfs +description: | + Perform joint genotyping on one or more samples pre-called with HaplotypeCaller. 
+keywords: + - joint genotyping + - genotype + - gvcf +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gvcf: + type: tuple of files + description: | + Tuple of gVCF(.gz) file (first) and its index (second) or the path to a GenomicsDB (and empty) + pattern: ["*.{vcf,vcf.gz}", "*.{idx,tbi}"] + - fasta: + type: file + description: Reference fasta file + pattern: "*.fasta" + - fasta_index: + type: file + description: Reference fasta index file + pattern: "*.fai" + - fasta_dict: + type: file + description: Reference fasta sequence dict file + pattern: "*.dict" + - dbsnp: + type: file + description: dbSNP VCF file + pattern: "*.vcf.gz" + - dbsnp_index: + type: tuple of files + description: dbSNP VCF index file + pattern: "*.tbi" + - intervals_bed: + type: file + description: An intevals BED file + pattern: "*.bed" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - vcf: + type: file + description: Genotyped VCF file + pattern: "*.vcf.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/modules/gatk4/getpileupsummaries/functions.nf b/modules/gatk4/getpileupsummaries/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/getpileupsummaries/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 09449f12..361974e8 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_GETPILEUPSUMMARIES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) path variants - path variants_idx + path variants_tbi path sites output: @@ -29,22 +18,29 @@ process GATK4_GETPILEUPSUMMARIES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def sitesCommand = '' sitesCommand = sites ? " -L ${sites} " : " -L ${variants} " + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK GetPileupSummaries] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk GetPileupSummaries \\ + gatk --java-options "-Xmx${avail_mem}g" GetPileupSummaries \\ -I $bam \\ -V $variants \\ $sitesCommand \\ -O ${prefix}.pileups.table \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index 70158a8d..0add299b 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -35,7 +35,7 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. Is also used as sites file if no separate sites file is specified. pattern: "*.vcf.gz" - - variants_idx: + - variants_tbi: type: file description: Index file for the germline resource. 
pattern: "*.vcf.gz.tbi" diff --git a/modules/gatk4/haplotypecaller/functions.nf b/modules/gatk4/haplotypecaller/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/haplotypecaller/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 4bddbb6d..e00f1e58 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_HAPLOTYPECALLER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(input), path(input_index) @@ -33,7 +22,8 @@ process GATK4_HAPLOTYPECALLER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def interval_option = interval ? "-L ${interval}" : "" def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" def avail_mem = 3 @@ -51,12 +41,12 @@ process GATK4_HAPLOTYPECALLER { ${dbsnp_option} \\ ${interval_option} \\ -O ${prefix}.vcf.gz \\ - $options.args \\ + $args \\ --tmp-dir . cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf new file mode 100644 index 00000000..cc6c663e --- /dev/null +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -0,0 +1,36 @@ +process GATK4_INDEXFEATUREFILE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0' : + 'quay.io/biocontainers/gatk4:4.2.0.0--0' }" + + input: + tuple val(meta), path(feature_file) + + output: + tuple val(meta), path("*.{tbi,idx}"), emit: index + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IndexFeatureFile] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" \\ + IndexFeatureFile \\ + $args \\ + -I $feature_file + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/indexfeaturefile/meta.yml b/modules/gatk4/indexfeaturefile/meta.yml new file mode 100644 index 00000000..eebe6b85 --- /dev/null +++ b/modules/gatk4/indexfeaturefile/meta.yml @@ -0,0 +1,42 @@ +name: gatk4_indexfeaturefile +description: Creates an index for a feature file, e.g. VCF or BED file. +keywords: + - index + - feature +tools: + - gatk4: + description: Genome Analysis Toolkit (GATK4) + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + tool_dev_url: https://github.com/broadinstitute/gatk + doi: "10.1158/1538-7445.AM2017-3590" + licence: ['BSD-3-clause'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - feature_file: + type: file + description: VCF/BED file + pattern: "*.{vcf,vcf.gz,bed,bed.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - index: + type: file + description: Index for VCF/BED file + pattern: "*.{tbi,idx}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@santiagorevale" diff --git a/modules/gatk4/intervallisttools/functions.nf b/modules/gatk4/intervallisttools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/intervallisttools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 2f464919..b813d844 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_INTERVALLISTTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--hdfd78af_1" - } + conda (params.enable_conda ? 
"bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(interval_list) @@ -26,16 +15,23 @@ process GATK4_INTERVALLISTTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK IntervalListTools] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ mkdir ${prefix}_split - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ IntervalListTools \\ -I ${interval_list} \\ -O ${prefix}_split \\ - $options.args + $args python3 < versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/learnreadorientationmodel/functions.nf b/modules/gatk4/learnreadorientationmodel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/learnreadorientationmodel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/learnreadorientationmodel/main.nf b/modules/gatk4/learnreadorientationmodel/main.nf index 0a499def..0c2f09d2 100644 --- a/modules/gatk4/learnreadorientationmodel/main.nf +++ b/modules/gatk4/learnreadorientationmodel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_LEARNREADORIENTATIONMODEL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(f1r2) @@ -26,19 +15,26 @@ process GATK4_LEARNREADORIENTATIONMODEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def inputs_list = [] f1r2.each() { a -> inputs_list.add(" -I " + a) } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK LearnReadOrientationModel] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk \\ + gatk --java-options "-Xmx${avail_mem}g" \\ LearnReadOrientationModel \\ ${inputs_list.join(' ')} \\ -O ${prefix}.tar.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/markduplicates/functions.nf b/modules/gatk4/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return 
options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index b1ff5222..8bdb2c0a 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MARKDUPLICATES { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -28,26 +17,27 @@ process GATK4_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") - def avail_mem = 3 + def avail_mem = 3 if (!task.memory) { - log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + log.info '[GATK MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk MarkDuplicates \\ + gatk --java-options "-Xmx${avail_mem}g" MarkDuplicates \\ $bam_list \\ --METRICS_FILE ${prefix}.metrics \\ --TMP_DIR . \\ --CREATE_INDEX true \\ --OUTPUT ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergebamalignment/functions.nf b/modules/gatk4/mergebamalignment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergebamalignment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - 
options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 0c9fe5ee..a0f54976 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEBAMALIGNMENT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(aligned) @@ -29,18 +18,25 @@ process GATK4_MERGEBAMALIGNMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeBamAlignment] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk MergeBamAlignment \\ + gatk --java-options "-Xmx${avail_mem}g" MergeBamAlignment \\ ALIGNED=$aligned \\ UNMAPPED=$unmapped \\ R=$fasta \\ O=${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mergevcfs/functions.nf b/modules/gatk4/mergevcfs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mergevcfs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a 
path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index ce9a52c3..1fcce485 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MERGEVCFS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(vcfs) @@ -28,7 +17,8 @@ process GATK4_MERGEVCFS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of VCFs to merge def input = "" @@ -36,16 +26,22 @@ process GATK4_MERGEVCFS { input += " I=${vcf}" } def ref = use_ref_dict ? "D=${ref_dict}" : "" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MergeVcfs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk MergeVcfs \\ + gatk --java-options "-Xmx${avail_mem}g" MergeVcfs \\ $input \\ O=${prefix}.vcf.gz \\ $ref \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/functions.nf b/modules/gatk4/mutect2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/mutect2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 7999eec3..414c7705 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_MUTECT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta) , path(input) , path(input_index) , val(which_norm) @@ -25,12 +14,12 @@ process GATK4_MUTECT2 { val run_mito val interval_label path fasta - path fastaidx + path fai path dict path germline_resource - path germline_resource_idx + path germline_resource_tbi path panel_of_normals - path panel_of_normals_idx + path panel_of_normals_tbi output: tuple val(meta), path("*.vcf.gz") , emit: vcf @@ -40,7 +29,8 @@ process GATK4_MUTECT2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def panels_command = '' def normals_command = '' @@ -63,18 +53,24 @@ process GATK4_MUTECT2 { normals_command = '-normal ' + which_norm.join( ' -normal ') } + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK Mutect2] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } """ - gatk Mutect2 \\ + gatk --java-options "-Xmx${avail_mem}g" Mutect2 \\ -R ${fasta} \\ ${inputs_command} \\ ${normals_command} \\ ${panels_command} \\ -O ${prefix}.vcf.gz \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 44601e41..83f6cb7c 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -54,10 +54,10 @@ input: type: file description: The reference fasta file pattern: "*.fasta" - - fastaidx: + - fai: type: file description: Index of reference fasta file - pattern: "fasta.fai" + pattern: "*.fasta.fai" - dict: type: file description: GATK sequence dictionary @@ -66,18 +66,18 @@ input: type: file description: Population vcf of germline sequencing, containing allele fractions. pattern: "*.vcf.gz" - - germline_resource_idx: + - germline_resource_tbi: type: file description: Index file for the germline resource. - pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" - panel_of_normals: type: file description: vcf file to be used as a panel of normals. pattern: "*.vcf.gz" - - panel_of_normals_idx: + - panel_of_normals_tbi: type: file description: Index for the panel of normals. 
- pattern: "*.vcf.gz_tbi" + pattern: "*.vcf.gz.tbi" output: - vcf: diff --git a/modules/gatk4/revertsam/functions.nf b/modules/gatk4/revertsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/revertsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index b3c9085a..0713d7ca 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_REVERTSAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,16 +15,23 @@ process GATK4_REVERTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK RevertSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk RevertSam \\ + gatk --java-options "-Xmx${avail_mem}g" RevertSam \\ I=$bam \\ O=${prefix}.reverted.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/samtofastq/functions.nf b/modules/gatk4/samtofastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/samtofastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 324f3bae..0afb7ef3 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SAMTOFASTQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -26,17 +15,24 @@ process GATK4_SAMTOFASTQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SamToFastq] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk SamToFastq \\ + gatk --java-options "-Xmx${avail_mem}g" SamToFastq \\ I=$bam \\ $output \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/functions.nf b/modules/gatk4/splitncigarreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/splitncigarreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - 
return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index 793cc671..6daed954 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -1,43 +1,41 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_SPLITNCIGARREADS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: tuple val(meta), path(bam) - tuple path(fasta), path(fai), path(dict) + path fasta + path fai + path dict output: tuple val(meta), path('*.bam'), emit: bam path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK SplitNCigarReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } """ - gatk SplitNCigarReads \\ + gatk --java-options "-Xmx${avail_mem}g" SplitNCigarReads \\ -R $fasta \\ -I $bam \\ -O ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index f287ede4..fd6edda0 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -24,10 +24,17 @@ input: description: BAM/SAM/CRAM file containing reads pattern: "*.{bam,sam,cram}" - fasta: - type: tuple of files - description: | - Tuple of fasta file (first), sequence dict (second) and fasta index (third) - pattern: ["*.fasta", "*.dict", "*.fai"] + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" output: - bam: type: file diff --git a/modules/gatk4/variantfiltration/functions.nf b/modules/gatk4/variantfiltration/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gatk4/variantfiltration/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 28084645..efe245cc 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -1,46 +1,42 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GATK4_VARIANTFILTRATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::gatk4=4.2.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gatk4:4.2.0.0--0" - } else { - container "quay.io/biocontainers/gatk4:4.2.0.0--0" - } + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf) + tuple val(meta), path(vcf), path(vcf_tbi) path fasta path fai path dict output: - tuple val(meta), path("*.vcf"), emit: vcf - path "versions.yml" , emit: versions - + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK VariantFiltration] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.toGiga() + } """ - gatk VariantFiltration \\ + gatk --java-options "-Xmx${avail_mem}G" VariantFiltration \\ -R $fasta \\ -V $vcf \\ - -O ${prefix}.vcf \\ - $options.args + -O ${prefix}.vcf.gz \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gatk4/variantfiltration/meta.yml b/modules/gatk4/variantfiltration/meta.yml index 6d4983a6..71f0b8b2 100644 --- a/modules/gatk4/variantfiltration/meta.yml +++ b/modules/gatk4/variantfiltration/meta.yml @@ -21,8 +21,12 @@ input: e.g. 
[ id:'test'] - vcf: type: list - description: Input VCF file - pattern: "*.{vcf}" + description: List of VCF(.gz) files + pattern: "*.{vcf,vcf.gz}" + - vcf_tbi: + type: list + description: List of VCF file indexes + pattern: "*.{idx,tbi}" - fasta: type: file description: Fasta file of reference genome @@ -38,8 +42,12 @@ input: output: - vcf: type: file - description: filtered VCF file - pattern: "*.filtered.{vcf}" + description: Compressed VCF file + pattern: "*.vcf.gz" + - tbi: + type: file + description: Index of VCF file + pattern: "*.vcf.gz.tbi" - versions: type: file description: File containing software versions diff --git a/modules/genmap/index/functions.nf b/modules/genmap/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genmap/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim 
whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genmap/index/main.nf b/modules/genmap/index/main.nf index c79596f0..943f1a31 100644 --- a/modules/genmap/index/main.nf +++ b/modules/genmap/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENMAP_INDEX { tag '$fasta' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::genmap=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1" - } else { - container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' : + 'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }" input: path fasta @@ -26,6 +15,7 @@ process GENMAP_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ genmap \\ index \\ @@ -33,8 +23,8 @@ process GENMAP_INDEX { -I genmap cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + "${task.process}": + genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') END_VERSIONS """ } diff --git a/modules/genmap/mappability/functions.nf b/modules/genmap/mappability/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genmap/mappability/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - 
options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genmap/mappability/main.nf b/modules/genmap/mappability/main.nf index 4d858cbb..94083f14 100644 --- a/modules/genmap/mappability/main.nf +++ b/modules/genmap/mappability/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENMAP_MAPPABILITY { tag '$fasta' label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::genmap=1.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1" - } else { - container "quay.io/biocontainers/genmap:1.3.0--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/genmap:1.3.0--h1b792b2_1' : + 'quay.io/biocontainers/genmap:1.3.0--h1b792b2_1' }" input: path index @@ -28,16 +17,17 @@ process GENMAP_MAPPABILITY { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ genmap \\ map \\ - $options.args \\ + $args \\ -I $index \\ -O mappability cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') + "${task.process}": + genmap: \$(genmap --version 2>&1 | sed 's/GenMap version: //; s/SeqAn.*\$//') END_VERSIONS """ } diff --git a/modules/genrich/functions.nf b/modules/genrich/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/genrich/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/genrich/main.nf b/modules/genrich/main.nf index f34f9cd2..d9deea3c 100644 --- a/modules/genrich/main.nf +++ b/modules/genrich/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GENRICH { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::genrich=0.6.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1" - } else { - container "quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/genrich:0.6.1--h5bf99c6_1' : + 'quay.io/biocontainers/genrich:0.6.1--h5bf99c6_1' }" input: tuple val(meta), path(treatment_bam) @@ -36,7 +25,8 @@ process GENRICH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def control = control_bam ? "-c $control_bam" : '' def blacklist = blacklist_bed ? "-E $blacklist_bed" : "" def pvalues = save_pvalues ? "-f ${prefix}.pvalues.bedGraph" : "" @@ -44,7 +34,7 @@ process GENRICH { def bed = save_bed ? "-b ${prefix}.intervals.bed" : "" def duplicates = "" if (save_duplicates) { - if (options.args.contains('-r')) { + if (args.contains('-r')) { duplicates = "-R ${prefix}.duplicates.txt" } else { log.info '[Genrich] Duplicates can only be saved if they are filtered, defaulting to -r option (Remove PCR duplicates).' 
@@ -54,7 +44,7 @@ process GENRICH { """ Genrich \\ -t $treatment_bam \\ - $options.args \\ + $args \\ $control \\ $blacklist \\ -o ${prefix}.narrowPeak \\ @@ -65,8 +55,8 @@ process GENRICH { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') + "${task.process}": + genrich: \$(echo \$(Genrich --version 2>&1) | sed 's/^Genrich, version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gffread/functions.nf b/modules/gffread/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gffread/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gffread/main.nf b/modules/gffread/main.nf index 4133ee08..e7893f8b 100644 --- a/modules/gffread/main.nf +++ b/modules/gffread/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GFFREAD { tag "$gff" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::gffread=0.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0" - } else { - container "quay.io/biocontainers/gffread:0.12.1--h8b12597_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gffread:0.12.1--h8b12597_0' : + 'quay.io/biocontainers/gffread:0.12.1--h8b12597_0' }" input: path gff @@ -26,15 +15,16 @@ process GFFREAD { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${gff.baseName}${options.suffix}" : "${gff.baseName}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${gff.baseName}" """ gffread \\ $gff \\ - $options.args \\ + $args \\ -o ${prefix}.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(gffread --version 2>&1) + "${task.process}": + gffread: \$(gffread --version 2>&1) END_VERSIONS """ } diff --git a/modules/glnexus/functions.nf b/modules/glnexus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/glnexus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 1384334f..b8afca22 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GLNEXUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::glnexus=1.4.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0" - } else { - container "quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/glnexus:1.4.1--h40d77a6_0' : + 'quay.io/biocontainers/glnexus:1.4.1--h40d77a6_0' }" input: tuple val(meta), path(gvcfs) @@ -26,7 +15,8 @@ process GLNEXUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" // Make list of GVCFs to merge def input = gvcfs.collect { it.toString() } @@ -40,13 +30,13 @@ process GLNEXUS { glnexus_cli \\ --threads $task.cpus \\ --mem-gbytes $avail_mem \\ - $options.args \\ + $args \\ ${input.join(' ')} \\ > ${prefix}.bcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') + "${task.process}": + glnexus: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/align/functions.nf b/modules/graphmap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - 
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/align/main.nf b/modules/graphmap2/align/main.nf index 831b0b3b..554e585b 100644 --- a/modules/graphmap2/align/main.nf +++ b/modules/graphmap2/align/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_ALIGN { tag "$meta.id" label 'process_medium' tag "$meta.id" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process GRAPHMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ graphmap2 \\ align \\ @@ -38,11 +28,11 @@ process GRAPHMAP2_ALIGN { -i $index \\ -d $reads \\ -o ${prefix}.sam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/graphmap2/index/functions.nf b/modules/graphmap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/graphmap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/graphmap2/index/main.nf b/modules/graphmap2/index/main.nf index a8b03074..fffc7bcb 100644 --- a/modules/graphmap2/index/main.nf +++ b/modules/graphmap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GRAPHMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? "bioconda::graphmap=0.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0" - } else { - container "quay.io/biocontainers/graphmap:0.6.3--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/graphmap:0.6.3--he513fc3_0' : + 'quay.io/biocontainers/graphmap:0.6.3--he513fc3_0' }" input: path fasta @@ -25,17 +14,18 @@ process GRAPHMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ graphmap2 \\ align \\ -t $task.cpus \\ -I \\ - $options.args \\ + $args \\ -r $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') + "${task.process}": + graphmap2: \$(echo \$(graphmap2 align 2>&1) | sed 's/^.*Version: v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/gstama/collapse/functions.nf b/modules/gstama/collapse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/collapse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/collapse/main.nf b/modules/gstama/collapse/main.nf index d4167b5e..1c06692d 100644 --- a/modules/gstama/collapse/main.nf +++ b/modules/gstama/collapse/main.nf @@ -1,29 +1,18 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_COLLAPSE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
"bioconda::gs-tama=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" - } + conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0' }" input: tuple val(meta), path(bam) path fasta output: - tuple val(meta), path("*.bed") , emit: bed + tuple val(meta), path("*_collapsed.bed") , emit: bed tuple val(meta), path("*_trans_read.bed") , emit: bed_trans_reads tuple val(meta), path("*_local_density_error.txt"), emit: local_density_error tuple val(meta), path("*_polya.txt") , emit: polya @@ -36,17 +25,18 @@ process GSTAMA_COLLAPSE { tuple val(meta), path("*_variants.txt") , emit: variants, optional: true script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_collapse.py \\ -s $bam \\ -f $fasta \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) + "${task.process}": + gstama: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) END_VERSIONS """ } diff --git a/modules/gstama/merge/functions.nf b/modules/gstama/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gstama/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gstama/merge/main.nf b/modules/gstama/merge/main.nf index 37d685f6..53ff93e4 100644 --- a/modules/gstama/merge/main.nf +++ b/modules/gstama/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GSTAMA_MERGE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::gs-tama=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0" - } else { - container "quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.2--hdfd78af_0' : + 'quay.io/biocontainers/gs-tama:1.0.2--hdfd78af_0' }" input: tuple val(meta), path(bed) @@ -30,17 +19,18 @@ process GSTAMA_MERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ tama_merge.py \\ -f $filelist \\ -d merge_dup \\ -p ${prefix} \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( tama_merge.py -version | head -n1 ) + "${task.process}": + gstama: \$( tama_merge.py -version | head -n1 ) END_VERSIONS """ } diff --git a/modules/gtdbtk/classifywf/functions.nf b/modules/gtdbtk/classifywf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gtdbtk/classifywf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = 
args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gtdbtk/classifywf/main.nf b/modules/gtdbtk/classifywf/main.nf index fdcef76a..4a4b3a01 100644 --- a/modules/gtdbtk/classifywf/main.nf +++ b/modules/gtdbtk/classifywf/main.nf @@ -1,22 +1,12 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.5.0' // When using stubs for the GTDB database, the version info isn't printed. +def VERSION = '1.5.0' // Version information not provided by tool on CLI process GTDBTK_CLASSIFYWF { tag "${meta.assembler}-${meta.id}" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gtdbtk=1.5.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gtdbtk:1.5.0--pyhdfd78af_0' : + 'quay.io/biocontainers/gtdbtk:1.5.0--pyhdfd78af_0' }" input: tuple val(meta), path("bins/*") @@ -35,6 +25,7 @@ process GTDBTK_CLASSIFYWF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def pplacer_scratch = params.gtdbtk_pplacer_scratch ? "--scratch_dir pplacer_tmp" : "" """ export GTDBTK_DATA_PATH="\${PWD}/database" @@ -43,7 +34,7 @@ process GTDBTK_CLASSIFYWF { fi gtdbtk classify_wf \\ - $options.args \\ + $args \\ --genome_dir bins \\ --prefix "gtdbtk.${meta.assembler}-${meta.id}" \\ --out_dir "\${PWD}" \\ @@ -58,8 +49,8 @@ process GTDBTK_CLASSIFYWF { mv gtdbtk.warnings.log "gtdbtk.${meta.assembler}-${meta.id}.warnings.log" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") + "${task.process}": + gtdbtk: \$(echo \$(gtdbtk --version -v 2>&1) | sed "s/gtdbtk: version //; s/ Copyright.*//") END_VERSIONS """ @@ -76,8 +67,8 @@ process GTDBTK_CLASSIFYWF { touch gtdbtk.${meta.assembler}-${meta.id}.failed_genomes.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + gtdbtk: $VERSION END_VERSIONS """ } diff --git a/modules/gubbins/functions.nf b/modules/gubbins/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gubbins/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise 
default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gubbins/main.nf b/modules/gubbins/main.nf index da194906..b4c6dc23 100644 --- a/modules/gubbins/main.nf +++ b/modules/gubbins/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUBBINS { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::gubbins=3.0.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0" - } else { - container "quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gubbins:3.0.0--py39h5bf99c6_0' : + 'quay.io/biocontainers/gubbins:3.0.0--py39h5bf99c6_0' }" input: path alignment @@ -33,14 +22,15 @@ process GUBBINS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ run_gubbins.py \\ --threads $task.cpus \\ - $options.args \\ + $args \\ $alignment cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(run_gubbins.py --version 2>&1) + "${task.process}": + gubbins: \$(run_gubbins.py --version 2>&1) END_VERSIONS """ } diff --git a/modules/gunc/downloaddb/functions.nf b/modules/gunc/downloaddb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/downloaddb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/downloaddb/main.nf b/modules/gunc/downloaddb/main.nf index af421608..430b862b 100644 --- a/modules/gunc/downloaddb/main.nf +++ b/modules/gunc/downloaddb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_DOWNLOADDB { tag '$db_name' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: val db_name @@ -26,12 +15,13 @@ process GUNC_DOWNLOADDB { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - gunc download_db . -db $db_name $options.args + gunc download_db . -db $db_name $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunc/run/functions.nf b/modules/gunc/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunc/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunc/run/main.nf b/modules/gunc/run/main.nf index f873a7df..8508c9f0 100644 --- a/modules/gunc/run/main.nf +++ b/modules/gunc/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNC_RUN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::gunc=1.0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gunc:1.0.5--pyhdfd78af_0' : + 'quay.io/biocontainers/gunc:1.0.5--pyhdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -28,18 +17,19 @@ process GUNC_RUN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ gunc \\ run \\ --input_fasta $fasta \\ --db_file $db \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( gunc --version ) + "${task.process}": + gunc: \$( gunc --version ) END_VERSIONS """ } diff --git a/modules/gunzip/functions.nf b/modules/gunzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/gunzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index aec4569f..77a4e546 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -1,41 +1,31 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process GUNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: - path archive + tuple val(meta), path(archive) output: - path "$gunzip", emit: gunzip - path "versions.yml" , emit: versions + tuple val(meta), path("$gunzip"), emit: gunzip + path "versions.yml" , emit: versions script: - gunzip = archive.toString() - '.gz' + def args = task.ext.args ?: '' + gunzip = archive.toString() - '.gz' """ gunzip \\ -f \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') + "${task.process}": + gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/gunzip/meta.yml b/modules/gunzip/meta.yml index 3482f0d2..ea1f1546 100644 --- a/modules/gunzip/meta.yml +++ b/modules/gunzip/meta.yml @@ -10,6 +10,11 @@ tools: documentation: https://www.gnu.org/software/gzip/manual/gzip.html licence: ['GPL-3.0-or-later'] input: + - meta: + type: map + description: | + Optional groovy Map containing meta information + e.g. 
[ id:'test', single_end:false ] - archive: type: file description: File to be compressed/uncompressed @@ -26,3 +31,4 @@ output: authors: - "@joseespinosa" - "@drpatelh" + - "@jfy133" diff --git a/modules/hicap/functions.nf b/modules/hicap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hicap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def 
key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hicap/main.nf b/modules/hicap/main.nf index e2e70678..a96343f6 100644 --- a/modules/hicap/main.nf +++ b/modules/hicap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HICAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hicap=1.0.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0" - } else { - container "quay.io/biocontainers/hicap:1.0.3--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hicap:1.0.3--py_0' : + 'quay.io/biocontainers/hicap:1.0.3--py_0' }" input: tuple val(meta), path(fasta) @@ -24,13 +13,14 @@ process HICAP { path model_fp output: - tuple val(meta), path("*.gbk"), emit: gbk - tuple val(meta), path("*.svg"), emit: svg - tuple val(meta), path("*.tsv"), emit: tsv + tuple val(meta), path("*.gbk"), emit: gbk, optional: true + tuple val(meta), path("*.svg"), emit: svg, optional: true + tuple val(meta), path("*.tsv"), emit: tsv, optional: true path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def database_args = database_dir ? "--database_dir ${database_dir}" : "" def model_args = model_fp ? "--model_fp ${model_fp}" : "" def is_compressed = fasta.getName().endsWith(".gz") ? true : false @@ -39,18 +29,17 @@ process HICAP { if [ "$is_compressed" == "true" ]; then gzip -c -d $fasta > $fasta_name fi - hicap \\ --query_fp $fasta_name \\ $database_args \\ $model_args \\ - $options.args \\ + $args \\ --threads $task.cpus \\ -o ./ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) + "${task.process}": + hicap: \$( echo \$( hicap --version 2>&1 ) | sed 's/^.*hicap //' ) END_VERSIONS """ } diff --git a/modules/hifiasm/functions.nf b/modules/hifiasm/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hifiasm/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - 
return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hifiasm/main.nf b/modules/hifiasm/main.nf index 9dfc9618..208554d6 100644 --- a/modules/hifiasm/main.nf +++ b/modules/hifiasm/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HIFIASM { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hifiasm=0.15.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0" - } else { - container "quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hifiasm:0.15.4--h2e03b76_0' : + 'quay.io/biocontainers/hifiasm:0.15.4--h2e03b76_0' }" input: tuple val(meta), path(reads) @@ -37,11 +26,12 @@ process HIFIASM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (use_parental_kmers) { """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ -1 $paternal_kmer_dump \\ @@ -49,21 +39,21 @@ process HIFIASM { $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } else { // Phasing with Hi-C data is not supported yet """ hifiasm \\ - $options.args \\ + $args \\ -o ${prefix}.asm \\ -t $task.cpus \\ $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hifiasm --version 2>&1) + "${task.process}": + hifiasm: \$(hifiasm --version 2>&1) END_VERSIONS """ } diff --git a/modules/hisat2/align/functions.nf b/modules/hisat2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join 
elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 9b73216b..ae888616 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hisat2=2.2.0 bioconda::samtools=1.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } else { - container "quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' : + 'quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' }" input: tuple val(meta), path(reads) @@ -26,14 +15,14 @@ process HISAT2_ALIGN { path splicesites output: - tuple val(meta), path("*.bam"), emit: bam - tuple val(meta), path("*.log"), emit: summary - path "versions.yml" , emit: versions - + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*.log") , emit: summary tuple val(meta), path("*fastq.gz"), optional:true, emit: fastq + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -55,12 +44,12 @@ process HISAT2_ALIGN { --threads $task.cpus \\ $seq_center \\ $unaligned \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 256 - > ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ @@ -80,7 +69,7 @@ process HISAT2_ALIGN { $unaligned \\ --no-mixed \\ --no-discordant \\ - $options.args \\ + $args \\ | samtools view -bS -F 4 -F 8 -F 256 - > ${prefix}.bam if [ -f ${prefix}.unmapped.fastq.1.gz ]; then @@ -91,8 +80,8 @@ process HISAT2_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/modules/hisat2/build/functions.nf b/modules/hisat2/build/functions.nf deleted file 
mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/build/main.nf b/modules/hisat2/build/main.nf index 015f6f59..4e8cd02b 100644 --- a/modules/hisat2/build/main.nf +++ b/modules/hisat2/build/main.nf @@ -1,25 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_BUILD { tag "$fasta" label 'process_high' label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path fasta @@ -31,6 +20,7 @@ process HISAT2_BUILD { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 0 if (!task.memory) { log.info "[HISAT2 index build] Available memory not known - defaulting to 0. Specify process memory requirements to change this." @@ -52,7 +42,6 @@ process HISAT2_BUILD { log.info "[HISAT2 index build] Less than ${hisat2_build_memory} GB available, so NOT using splice sites and exons to build HISAT2 index." log.info "[HISAT2 index build] Use --hisat2_build_memory [small number] to skip this check." } - """ mkdir hisat2 $extract_exons @@ -60,13 +49,13 @@ process HISAT2_BUILD { -p $task.cpus \\ $ss \\ $exon \\ - $options.args \\ + $args \\ $fasta \\ hisat2/${fasta.baseName} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hisat2/extractsplicesites/functions.nf b/modules/hisat2/extractsplicesites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hisat2/extractsplicesites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hisat2/extractsplicesites/main.nf b/modules/hisat2/extractsplicesites/main.nf index 1c8b7830..302c35f1 100644 --- a/modules/hisat2/extractsplicesites/main.nf +++ b/modules/hisat2/extractsplicesites/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.2.0' +def VERSION = '2.2.0' // Version information not provided by tool on CLI process HISAT2_EXTRACTSPLICESITES { tag "$gtf" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::hisat2=2.2.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3" - } else { - container "quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : + 'quay.io/biocontainers/hisat2:2.2.1--h1b792b2_3' }" input: path gtf @@ -28,11 +17,12 @@ process HISAT2_EXTRACTSPLICESITES { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hisat2: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/gccounter/functions.nf b/modules/hmmcopy/gccounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/gccounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/gccounter/main.nf b/modules/hmmcopy/gccounter/main.nf index 6e7bc11f..36666095 100644 --- a/modules/hmmcopy/gccounter/main.nf +++ b/modules/hmmcopy/gccounter/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_GCCOUNTER { label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: path fasta @@ -27,14 +16,15 @@ process HMMCOPY_GCCOUNTER { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ gcCounter \\ - $options.args \\ + $args \\ ${fasta} > ${fasta.baseName}.gc.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmcopy/readcounter/functions.nf b/modules/hmmcopy/readcounter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmcopy/readcounter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files 
- options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmcopy/readcounter/main.nf b/modules/hmmcopy/readcounter/main.nf index 9e3e72a7..6399b1a2 100644 --- a/modules/hmmcopy/readcounter/main.nf +++ b/modules/hmmcopy/readcounter/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.1.1' +def VERSION = '0.1.1' // Version information not provided by tool on CLI process HMMCOPY_READCOUNTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmcopy=0.1.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5" - } else { - container "quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hmmcopy:0.1.1--h2e03b76_5' : + 'quay.io/biocontainers/hmmcopy:0.1.1--h2e03b76_5' }" input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(bam), path(bai) output: - tuple val(meta), path("*.wig"), emit: wig - path "versions.yml" , emit: versions + tuple val(meta), path("*.wig"), emit: wig + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ readCounter \\ - $options.args \\ + $args \\ ${bam} > ${prefix}.wig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + hmmcopy: $VERSION END_VERSIONS """ } diff --git a/modules/hmmer/hmmalign/functions.nf b/modules/hmmer/hmmalign/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/hmmer/hmmalign/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/hmmer/hmmalign/main.nf b/modules/hmmer/hmmalign/main.nf index b4292feb..e6d04044 100644 --- a/modules/hmmer/hmmalign/main.nf +++ b/modules/hmmer/hmmalign/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process HMMER_HMMALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::hmmer=3.3.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1" - } else { - container "quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hmmer:3.3.2--h1b792b2_1' : + 'quay.io/biocontainers/hmmer:3.3.2--h1b792b2_1' }" input: tuple val(meta), path(fasta) @@ -27,18 +16,19 @@ process HMMER_HMMALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def fastacmd = fasta.getExtension() == 'gz' ? 
"gunzip -c $fasta" : "cat $fasta" """ $fastacmd | \\ hmmalign \\ - $options.args \\ + $args \\ $hmm \\ - | gzip -c > ${meta.id}.sthlm.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') + "${task.process}": + hmmer: \$(hmmalign -h | grep -o '^# HMMER [0-9.]*' | sed 's/^# HMMER *//') END_VERSIONS """ } diff --git a/modules/homer/annotatepeaks/functions.nf b/modules/homer/annotatepeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/annotatepeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/annotatepeaks/main.nf b/modules/homer/annotatepeaks/main.nf index 1714644b..84e0241a 100644 --- a/modules/homer/annotatepeaks/main.nf +++ b/modules/homer/annotatepeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_ANNOTATEPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::homer=4.11" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(peak) @@ -30,19 +19,20 @@ process HOMER_ANNOTATEPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ annotatePeaks.pl \\ $peak \\ $fasta \\ - $options.args \\ + $args \\ -gtf $gtf \\ -cpu $task.cpus \\ > ${prefix}.annotatePeaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/findpeaks/functions.nf b/modules/homer/findpeaks/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/findpeaks/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 
?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index 2e0b6db9..66de06b6 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_FINDPEAKS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,17 +17,18 @@ process HOMER_FINDPEAKS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ findPeaks \\ $tagDir \\ - $options.args \\ + $args \\ -o ${prefix}.peaks.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/maketagdirectory/functions.nf b/modules/homer/maketagdirectory/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/maketagdirectory/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 4f531e82..72e2091f 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKETAGDIRECTORY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(bed) @@ -26,20 +15,21 @@ process HOMER_MAKETAGDIRECTORY { output: tuple val(meta), path("tag_dir"), emit: tagdir - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ makeTagDirectory \\ tag_dir \\ - $options.args \\ + $args \\ $bed \\ -genome $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/homer/makeucscfile/functions.nf b/modules/homer/makeucscfile/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/homer/makeucscfile/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index c56da24b..17e86947 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -def options = initOptions(params.options) - -def VERSION = '4.11' +def VERSION = '4.11' // Version information not provided by tool on CLI process HOMER_MAKEUCSCFILE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3" - } else { - container "quay.io/biocontainers/homer:4.11--pl526hc9558a2_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" input: tuple val(meta), path(tagDir) @@ -28,16 +17,17 @@ process HOMER_MAKEUCSCFILE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ makeUCSCfile \\ $tagDir \\ - -o auto - $options.args + -o auto \\ + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + homer: $VERSION END_VERSIONS """ } diff --git a/modules/idr/functions.nf b/modules/idr/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/idr/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not 
publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/idr/main.nf b/modules/idr/main.nf index 006826ac..44b07be4 100644 --- a/modules/idr/main.nf +++ b/modules/idr/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IDR { tag "$prefix" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::idr=2.0.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5" - } else { - container "quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/idr:2.0.4.2--py39hcbe4a3b_5' : + 'quay.io/biocontainers/idr:2.0.4.2--py38h9af456f_5' }" input: path peaks @@ -30,6 +19,7 @@ process IDR { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' if (peaks.toList().size < 2) { log.error "[ERROR] idr needs at least two replicates only one provided." } @@ -46,11 +36,11 @@ process IDR { --output-file $idr_vals \\ --log-output-file $log_file \\ --plot \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') + "${task.process}": + idr: \$(echo \$(idr --version 2>&1) | sed 's/^.*IDR //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/imputeme/vcftoprs/main.nf b/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..5fee90c2 --- /dev/null +++ b/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,49 @@ +process IMPUTEME_VCFTOPRS { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "YOUR-TOOL-HERE" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://containers.biocontainers.pro/s3/SingImgsRepo/imputeme/vv1.0.7_cv1/imputeme_vv1.0.7_cv1.img' : + 'biocontainers/imputeme:vv1.0.7_cv1' }" + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.json"), emit: json + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + #!/usr/bin/env Rscript + + #Set configuration - either from args or from defaults + source("/imputeme/code/impute-me/functions.R") + if(file.exists('$args')){ + set_conf("set_from_file",'$args') + }else{ + set_conf("set_from_file", "/imputeme/code/impute-me/template/nextflow_default_configuration.R") + } + + #main run + return_message <- prepare_individual_genome('$vcf',overrule_vcf_checks=T) + uniqueID <- sub(' .+\$','',sub('^.+this run is ','',return_message)) + convert_vcfs_to_simple_format(uniqueID=uniqueID) + crawl_for_snps_to_analyze(uniqueIDs=uniqueID) + run_export_script(uniqueIDs=uniqueID) + file.copy(paste0("./",uniqueID,"/",uniqueID,"_data.json"),"output.json") + + #version export. Have to hardcode process name and software name because + #won't run inside an R-block + version_file_path="versions.yml" + f <- file(version_file_path,"w") + writeLines("IMPUTEME_VCFTOPRS:", f) + writeLines(paste0(" imputeme: ", sub("^v","",get_conf("version"))),f) + close(f) + + """ + +} diff --git a/modules/imputeme/vcftoprs/meta.yml b/modules/imputeme/vcftoprs/meta.yml new file mode 100644 index 00000000..8ba5dfe1 --- /dev/null +++ b/modules/imputeme/vcftoprs/meta.yml @@ -0,0 +1,41 @@ +name: imputeme_vcftoprs +description: inputs a VCF-file with whole genome DNA sequencing. Outputs a JSON with polygenic risk scores. 
+keywords: + - PRS, VCF +tools: + - imputeme: + description: + homepage: www.impute.me + documentation: https://hub.docker.com/repository/docker/lassefolkersen/impute-me + tool_dev_url: https://github.com/lassefolkersen/impute-me + doi: "https://doi.org/10.3389/fgene.2020.00578" + licence: LGPL3 + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ vcf:'test', single_end:false ] + - vcf: + type: file + description: vcf file + pattern: "*.{vcf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: json containing Z-scores for all calculated PRS + pattern: "*.{json}" + +authors: + - "@lassefolkersen" diff --git a/modules/iqtree/functions.nf b/modules/iqtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/iqtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to 
return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/iqtree/main.nf b/modules/iqtree/main.nf index bec879df..54a6486d 100644 --- a/modules/iqtree/main.nf +++ b/modules/iqtree/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IQTREE { tag "$alignment" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::iqtree=2.1.4_beta' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0" - } else { - container "quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/iqtree:2.1.4_beta--hdcc8f71_0' : + 'quay.io/biocontainers/iqtree:2.1.4_beta--hdcc8f71_0' }" input: path alignment @@ -27,20 +16,21 @@ process IQTREE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def fconst_args = constant_sites ? 
"-fconst $constant_sites" : '' def memory = task.memory.toString().replaceAll(' ', '') """ iqtree \\ $fconst_args \\ - $options.args \\ + $args \\ -s $alignment \\ -nt AUTO \\ -ntmax $task.cpus \\ -mem $memory \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') + "${task.process}": + iqtree: \$(echo \$(iqtree -version 2>&1) | sed 's/^IQ-TREE multicore version //;s/ .*//') END_VERSIONS """ } diff --git a/modules/ismapper/functions.nf b/modules/ismapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ismapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ismapper/main.nf b/modules/ismapper/main.nf index 20d3d5b7..a51cc01e 100644 --- a/modules/ismapper/main.nf +++ b/modules/ismapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISMAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ismapper=2.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ismapper:2.0.2--pyhdfd78af_1' : + 'quay.io/biocontainers/ismapper:2.0.2--pyhdfd78af_1' }" input: tuple val(meta), path(reads), path(reference), path(query) @@ -26,10 +15,11 @@ process ISMAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ ismap \\ - $options.args \\ + $args \\ --t $task.cpus \\ --output_dir results \\ --queries $query \\ @@ -37,8 +27,8 @@ process ISMAPPER { --reads $reads cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) + "${task.process}": + ismapper: \$( echo \$( ismap --version 2>&1 ) | sed 's/^.*ismap //' ) END_VERSIONS """ } diff --git a/modules/isoseq3/cluster/functions.nf b/modules/isoseq3/cluster/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/cluster/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/cluster/main.nf b/modules/isoseq3/cluster/main.nf index df005706..fdd47971 100644 --- a/modules/isoseq3/cluster/main.nf +++ b/modules/isoseq3/cluster/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_CLUSTER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -27,28 +16,27 @@ process ISOSEQ3_CLUSTER { tuple val(meta), path("*.transcripts.cluster") , emit: cluster tuple val(meta), path("*.transcripts.cluster_report.csv"), emit: cluster_report tuple val(meta), path("*.transcripts.transcriptset.xml") , emit: transcriptset - path "versions.yml" , emit: versions - tuple val(meta), path("*.transcripts.hq.bam") , optional: true, emit: hq_bam tuple val(meta), path("*.transcripts.hq.bam.pbi") , optional: true, emit: hq_pbi tuple val(meta), path("*.transcripts.lq.bam") , optional: true, emit: lq_bam tuple val(meta), path("*.transcripts.lq.bam.pbi") , optional: true, emit: lq_pbi tuple val(meta), path("*.transcripts.singletons.bam") , optional: true, emit: singletons_bam tuple val(meta), path("*.transcripts.singletons.bam.pbi"), optional: true, emit: singletons_pbi - + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ cluster \\ $bam \\ ${prefix}.transcripts.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - isoseq3 cluster: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) + "${task.process}": + isoseq3: \$( isoseq3 cluster --version|sed 's/isoseq cluster //g'|sed 's/ (.*//g' ) END_VERSIONS """ } diff --git a/modules/isoseq3/refine/functions.nf b/modules/isoseq3/refine/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/isoseq3/refine/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/isoseq3/refine/main.nf b/modules/isoseq3/refine/main.nf index 5a45eb2d..5044cba2 100644 --- a/modules/isoseq3/refine/main.nf +++ b/modules/isoseq3/refine/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ISOSEQ3_REFINE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::isoseq3=3.4.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0" - } else { - container "quay.io/biocontainers/isoseq3:3.4.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/isoseq3:3.4.0--0' : + 'quay.io/biocontainers/isoseq3:3.4.0--0' }" input: tuple val(meta), path(bam) @@ -31,19 +20,20 @@ process ISOSEQ3_REFINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ isoseq3 \\ refine \\ -j $task.cpus \\ - $options.args \\ + $args \\ $bam \\ $primers \\ ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) + "${task.process}": + isoseq3: \$( isoseq3 refine --version|sed 's/isoseq refine //'|sed 's/ (commit.\\+//' ) END_VERSIONS """ } diff --git a/modules/ivar/consensus/functions.nf b/modules/ivar/consensus/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/consensus/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/consensus/main.nf b/modules/ivar/consensus/main.nf index 33fa11f7..58d97c8c 100644 --- a/modules/ivar/consensus/main.nf +++ b/modules/ivar/consensus/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_CONSENSUS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,21 +18,23 @@ process IVAR_CONSENSUS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? 
"tee ${prefix}.mpileup |" : "" """ samtools mpileup \\ --reference $fasta \\ - $options.args2 \\ + $args2 \\ $bam | \\ $save_mpileup \\ ivar consensus \\ - $options.args \\ + $args \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/trim/functions.nf b/modules/ivar/trim/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/trim/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - 
def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/trim/main.nf b/modules/ivar/trim/main.nf index 6cf8171c..4d0c70a2 100644 --- a/modules/ivar/trim/main.nf +++ b/modules/ivar/trim/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_TRIM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,18 +17,19 @@ process IVAR_TRIM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ ivar trim \\ - $options.args \\ + $args \\ -i $bam \\ -b $bed \\ -p $prefix \\ > ${prefix}.ivar.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ivar/variants/functions.nf b/modules/ivar/variants/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ivar/variants/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ivar/variants/main.nf b/modules/ivar/variants/main.nf index d079a8e9..ce4abd4d 100644 --- a/modules/ivar/variants/main.nf +++ b/modules/ivar/variants/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process IVAR_VARIANTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ivar=1.3.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0" - } else { - container "quay.io/biocontainers/ivar:1.3.1--h089eab3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ivar:1.3.1--h089eab3_0' : + 'quay.io/biocontainers/ivar:1.3.1--h089eab3_0' }" input: tuple val(meta), path(bam) @@ -29,24 +18,26 @@ process IVAR_VARIANTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def save_mpileup = params.save_mpileup ? 
"tee ${prefix}.mpileup |" : "" def features = params.gff ? "-g $gff" : "" """ samtools mpileup \\ - $options.args2 \\ + $args2 \\ --reference $fasta \\ $bam | \\ $save_mpileup \\ ivar variants \\ - $options.args \\ + $args \\ $features \\ -r $fasta \\ -p $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') + "${task.process}": + ivar: \$(echo \$(ivar version 2>&1) | sed 's/^.*iVar version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/jupyternotebook/functions.nf b/modules/jupyternotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/jupyternotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') 
-} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/jupyternotebook/main.nf b/modules/jupyternotebook/main.nf index 2d8ad92f..e4bdf98b 100644 --- a/modules/jupyternotebook/main.nf +++ b/modules/jupyternotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' include { dump_params_yml; indent_code_block } from "./parametrize" -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true - process JUPYTERNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //ipykernel, jupytext, papermill and nbconvert Python packages. conda (params.enable_conda ? "ipykernel=6.0.3 jupytext=1.11.4 nbconvert=6.1.0 papermill=2.3.3 matplotlib=3.4.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } else { - container "quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963%3A879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' : + 'quay.io/biocontainers/mulled-v2-514b1a5d280c7043110b2a8d0a87b57ba392a963:879972fc8bdc81ee92f2bce3b4805d89a772bf84-0' }" input: tuple val(meta), path(notebook) @@ -36,7 +23,11 @@ process JUPYTERNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -44,14 +35,14 @@ process JUPYTERNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -71,10 +62,10 @@ process JUPYTERNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" - export NUMBA_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" + export NUMBA_NUM_THREADS="$task.cpus" # Convert notebook to ipynb using jupytext, execute using papermill, convert using nbconvert jupytext --to notebook --output - --set-kernel - ${notebook} \\ @@ -82,7 +73,7 @@ process JUPYTERNOTEBOOK { | jupyter nbconvert --stdin --to html --output ${prefix}.html cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": jupytext: \$(jupytext --version) ipykernel: \$(python -c "import ipykernel; print(ipykernel.__version__)") nbconvert: \$(jupyter nbconvert --version) diff --git a/modules/kallisto/index/functions.nf b/modules/kallisto/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallisto/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from 
process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallisto/index/main.nf b/modules/kallisto/index/main.nf index 96457b6d..4dc9c6d0 100644 --- a/modules/kallisto/index/main.nf +++ b/modules/kallisto/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTO_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::kallisto=0.46.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1" - } else { - container "quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kallisto:0.46.2--h4f7b962_1' : + 'quay.io/biocontainers/kallisto:0.46.2--h4f7b962_1' }" input: path fasta @@ -26,16 +15,17 @@ process KALLISTO_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ kallisto \\ index \\ - $options.args \\ + $args \\ -i kallisto \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') + "${task.process}": + kallisto: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/functions.nf b/modules/kallistobustools/count/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/count/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/count/main.nf b/modules/kallistobustools/count/main.nf index 8c705e51..d67eba31 100644 --- a/modules/kallistobustools/count/main.nf +++ b/modules/kallistobustools/count/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_COUNT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) 
} conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: tuple val(meta), path(reads) @@ -24,7 +13,7 @@ process KALLISTOBUSTOOLS_COUNT { path t2g path t1c path t2c - val workflow + val workflow_mode val technology output: @@ -32,7 +21,8 @@ process KALLISTOBUSTOOLS_COUNT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def cdna = t1c ? "-c1 $t1c" : '' def introns = t2c ? 
"-c2 $t2c" : '' """ @@ -43,16 +33,16 @@ process KALLISTOBUSTOOLS_COUNT { -g $t2g \\ $cdna \\ $introns \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ -x $technology \\ - $options.args \\ + $args \\ -o ${prefix}.count \\ ${reads[0]} \\ ${reads[1]} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/count/meta.yml b/modules/kallistobustools/count/meta.yml index bc2433bb..911697d2 100644 --- a/modules/kallistobustools/count/meta.yml +++ b/modules/kallistobustools/count/meta.yml @@ -39,9 +39,9 @@ input: type: file description: kb ref's c2 unspliced_t2c file pattern: "*.{introns_t2c.txt}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", "lamanno", "nucleus" pattern: "{standard,lamanno,nucleus,kite}" - technology: type: value diff --git a/modules/kallistobustools/ref/functions.nf b/modules/kallistobustools/ref/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kallistobustools/ref/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules 
-// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kallistobustools/ref/main.nf b/modules/kallistobustools/ref/main.nf index a8287498..1e496f67 100644 --- a/modules/kallistobustools/ref/main.nf +++ b/modules/kallistobustools/ref/main.nf @@ -1,27 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KALLISTOBUSTOOLS_REF { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::kb-python=0.26.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kb-python:0.26.3--pyhdfd78af_0' : + 'quay.io/biocontainers/kb-python:0.26.3--pyhdfd78af_0' }" input: path fasta path gtf - val workflow + val workflow_mode output: path "versions.yml" , emit: versions @@ -33,20 +22,21 @@ process KALLISTOBUSTOOLS_REF { path "intron_t2c.txt" , optional:true, emit: intron_t2c script: - if (workflow == "standard") { + def args = task.ext.args ?: '' + if (workflow_mode == "standard") { """ kb \\ ref \\ -i kb_ref_out.idx \\ -g t2g.txt \\ -f1 cdna.fa \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } else { @@ -59,13 +49,13 @@ process KALLISTOBUSTOOLS_REF { -f2 intron.fa \\ -c1 cdna_t2c.txt \\ -c2 intron_t2c.txt \\ - --workflow $workflow \\ + --workflow $workflow_mode \\ $fasta \\ $gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') + "${task.process}": + kallistobustools: \$(echo \$(kb --version 2>&1) | sed 's/^.*kb_python //;s/positional arguments.*\$//') END_VERSIONS """ } diff --git a/modules/kallistobustools/ref/meta.yml b/modules/kallistobustools/ref/meta.yml index 353b9c11..dcc78c66 100644 --- a/modules/kallistobustools/ref/meta.yml +++ b/modules/kallistobustools/ref/meta.yml @@ -21,9 +21,9 @@ input: type: file description: Genomic gtf file pattern: "*.{gtf,gtf.gz}" - - workflow: + - workflow_mode: type: value - description: String value defining worfklow to use, can be one of "standard", "lamanno", "nucleus" + description: String value defining workflow to use, can be one of "standard", 
"lamanno", "nucleus" pattern: "{standard,lamanno,nucleus}" output: diff --git a/modules/khmer/normalizebymedian/functions.nf b/modules/khmer/normalizebymedian/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/khmer/normalizebymedian/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/khmer/normalizebymedian/main.nf b/modules/khmer/normalizebymedian/main.nf index 234d172b..50b3d603 100644 --- a/modules/khmer/normalizebymedian/main.nf +++ b/modules/khmer/normalizebymedian/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KHMER_NORMALIZEBYMEDIAN { tag "${name}" label 'process_long' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::khmer=3.0.0a3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2" - } else { - container "quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2' : + 'quay.io/biocontainers/khmer:3.0.0a3--py37haa7609a_2' }" input: path pe_reads @@ -28,22 +17,22 @@ process KHMER_NORMALIZEBYMEDIAN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' pe_args = pe_reads ? "--paired" : "" se_args = se_reads ? "--unpaired-reads ${se_reads}" : "" files = pe_reads ? pe_reads : se_reads - """ normalize-by-median.py \\ -M ${task.memory.toGiga()}e9 \\ - --gzip ${options.args} \\ + --gzip $args \\ -o ${name}.fastq.gz \\ - ${pe_args} \\ - ${se_args} \\ - ${files} + $pe_args \\ + $se_args \\ + $files cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) + "${task.process}": + khmer: \$( normalize-by-median.py --version 2>&1 | grep ^khmer | sed 's/^khmer //' ) END_VERSIONS """ } diff --git a/modules/kleborate/functions.nf b/modules/kleborate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kleborate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kleborate/main.nf b/modules/kleborate/main.nf index 5bb76ad0..998eced1 100644 --- a/modules/kleborate/main.nf +++ b/modules/kleborate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KLEBORATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::kleborate=2.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1" - } else { - container "quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/kleborate:2.1.0--pyhdfd78af_1' : + 'quay.io/biocontainers/kleborate:2.1.0--pyhdfd78af_1' }" input: tuple val(meta), path(fastas) @@ -26,16 +15,17 @@ process KLEBORATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ kleborate \\ - $options.args \\ + $args \\ --outfile ${prefix}.results.txt \\ --assemblies *.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) + "${task.process}": + kleborate: \$( echo \$(kleborate --version | sed 's/Kleborate v//;')) END_VERSIONS """ } diff --git a/modules/kraken2/kraken2/functions.nf b/modules/kraken2/kraken2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/kraken2/kraken2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index 0d4e5840..3c4d1caf 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process KRAKEN2_KRAKEN2 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::kraken2=2.1.1 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' : + 'quay.io/biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:941789bd7fe00db16531c26de8bf3c5c985242a5-0' }" input: tuple val(meta), path(reads) @@ -29,7 +18,8 @@ process KRAKEN2_KRAKEN2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? 
"${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" @@ -42,14 +32,14 @@ process KRAKEN2_KRAKEN2 { --report ${prefix}.kraken2.report.txt \\ --gzip-compressed \\ $paired \\ - $options.args \\ + $args \\ $reads pigz -p $task.cpus *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') + "${task.process}": + kraken2: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/krona/kronadb/main.nf b/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ca7fc3d3 --- /dev/null +++ b/modules/krona/kronadb/main.nf @@ -0,0 +1,27 @@ +def VERSION='2.7.1' // Version information not provided by tool on CLI + +process KRONA_KRONADB { + label 'process_low' + + conda (params.enable_conda ? "bioconda::krona=2.7.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/krona:2.7.1--pl526_5' : + 'quay.io/biocontainers/krona:2.7.1--pl526_5' }" + + input: + + output: + path 'taxonomy/taxonomy.tab', emit: db + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktUpdateTaxonomy.sh taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/krona/kronadb/meta.yml b/modules/krona/kronadb/meta.yml new file mode 100644 index 00000000..2a12aaaf --- /dev/null +++ b/modules/krona/kronadb/meta.yml @@ -0,0 +1,30 @@ +name: krona_kronadb +description: KronaTools Update Taxonomy downloads a taxonomy database +keywords: + - database + - taxonomy + - krona +tools: + - krona: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. 
+ homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: https://github.com/marbl/Krona/wiki/Installing + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - none: There is no input. This module downloads a pre-built taxonomy database for use with Krona Tools. + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - db: + type: file + description: A TAB separated file that contains a taxonomy database. + pattern: "*.{tab}" + +authors: + - "@mjakobs" diff --git a/modules/krona/ktimporttaxonomy/main.nf b/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..bc79c98c --- /dev/null +++ b/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,30 @@ +def VERSION = '2.8' // Version information not provided by tool on CLI + +process KRONA_KTIMPORTTAXONOMY { + tag "${meta.id}" + label 'process_high' + + conda (params.enable_conda ? "bioconda::krona=2.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/krona:2.8--pl5262hdfd78af_2' : + 'quay.io/biocontainers/krona:2.8--pl5262hdfd78af_2' }" + + input: + tuple val(meta), path(report) + path "taxonomy/taxonomy.tab" + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + """ + ktImportTaxonomy "$report" -tax taxonomy + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: $VERSION + END_VERSIONS + """ +} diff --git a/modules/krona/ktimporttaxonomy/meta.yml b/modules/krona/ktimporttaxonomy/meta.yml new file mode 100644 index 00000000..b65919f8 --- /dev/null +++ b/modules/krona/ktimporttaxonomy/meta.yml @@ -0,0 +1,44 @@ +name: krona_ktimporttaxonomy +description: KronaTools Import Taxonomy imports taxonomy classifications and produces an interactive Krona plot. 
+keywords: + - plot + - taxonomy + - interactive + - html + - visualisation + - krona chart +tools: + - krona: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html + tool_dev_url: + doi: https://doi.org/10.1186/1471-2105-12-385 + licence: + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - database: + type: path + description: "Path to the taxonomy database downloaded by krona/kronadb" + - report: + type: file + description: "A tab-delimited file with taxonomy IDs and (optionally) query IDs, magnitudes, and scores. Query IDs are taken from column 1, taxonomy IDs from column 2, and scores from column 3. Lines beginning with # will be ignored." + pattern: "*.{tsv}" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - html: + type: file + description: A html file containing an interactive krona plot. 
+ pattern: "*.{html}" + +authors: + - "@mjakobs" diff --git a/modules/last/dotplot/functions.nf b/modules/last/dotplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/dotplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/dotplot/main.nf b/modules/last/dotplot/main.nf index d02e98ad..e8857403 100644 --- a/modules/last/dotplot/main.nf +++ b/modules/last/dotplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_DOTPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -28,17 +17,18 @@ process LAST_DOTPLOT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ last-dotplot \\ - $options.args \\ + $args \\ $maf \\ $prefix.$format # last-dotplot has no --version option so let's use lastal from the same suite cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastal/functions.nf b/modules/last/lastal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def 
paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/lastal/main.nf b/modules/last/lastal/main.nf index c4335f25..b5ac8bfe 100644 --- a/modules/last/lastal/main.nf +++ b/modules/last/lastal/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_LASTAL { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx), path (param_file) @@ -27,13 +16,14 @@ process LAST_LASTAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def trained_params = param_file ? "-p ${param_file}" : '' """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) lastal \\ $trained_params \\ - $options.args \\ + $args \\ -P $task.cpus \\ ${index}/\$INDEX_NAME \\ $fastx \\ @@ -42,8 +32,8 @@ process LAST_LASTAL { # which makes its checksum non-reproducible. 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/lastdb/functions.nf b/modules/last/lastdb/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/lastdb/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/lastdb/main.nf b/modules/last/lastdb/main.nf index fb765ada..e9895c5c 100644 --- a/modules/last/lastdb/main.nf +++ b/modules/last/lastdb/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_LASTDB { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -26,18 +15,19 @@ process LAST_LASTDB { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mkdir lastdb lastdb \\ - $options.args \\ + $args \\ -P $task.cpus \\ lastdb/${prefix} \\ $fastx cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafconvert/functions.nf b/modules/last/mafconvert/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafconvert/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { 
item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafconvert/main.nf b/modules/last/mafconvert/main.nf index 5e259109..ca60e7fe 100644 --- a/modules/last/mafconvert/main.nf +++ b/modules/last/mafconvert/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFCONVERT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -35,15 +24,16 @@ process LAST_MAFCONVERT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - maf-convert $options.args $format $maf | gzip --no-name \\ + maf-convert $args $format $maf | gzip --no-name \\ > ${prefix}.${format}.gz # maf-convert has no --version option but lastdb (part of the same package) has. 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/mafswap/functions.nf b/modules/last/mafswap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/mafswap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/mafswap/main.nf b/modules/last/mafswap/main.nf index 5ce38c92..0a58b027 100644 --- a/modules/last/mafswap/main.nf +++ b/modules/last/mafswap/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_MAFSWAP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,14 +15,15 @@ process LAST_MAFSWAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - maf-swap $options.args $maf | gzip --no-name > ${prefix}.swapped.maf.gz + maf-swap $args $maf | gzip --no-name > ${prefix}.swapped.maf.gz # maf-swap has no --version option but lastdb, part of the same package, has. cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version 2>&1 | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version 2>&1 | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/last/postmask/functions.nf b/modules/last/postmask/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/postmask/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join 
elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/postmask/main.nf b/modules/last/postmask/main.nf index 3102fbe6..fb097a11 100644 --- a/modules/last/postmask/main.nf +++ b/modules/last/postmask/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_POSTMASK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,15 +15,16 @@ process LAST_POSTMASK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if( "$maf" == "${prefix}.maf.gz" ) error "Input and output names are the same, use the suffix option to disambiguate" """ - last-postmask $options.args $maf | gzip --no-name > ${prefix}.maf.gz + last-postmask $args $maf | gzip --no-name > ${prefix}.maf.gz # last-postmask does not have a --version option cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastal --version 2>&1 | sed 's/lastal //') + "${task.process}": + last: \$(lastal --version 2>&1 | sed 's/lastal //') END_VERSIONS """ } diff --git a/modules/last/split/functions.nf b/modules/last/split/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/split/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/split/main.nf b/modules/last/split/main.nf index 2a9e5621..60ed135b 100644 --- a/modules/last/split/main.nf +++ b/modules/last/split/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_SPLIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(maf) @@ -26,13 +15,14 @@ process LAST_SPLIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - zcat < $maf | last-split $options.args | gzip --no-name > ${prefix}.maf.gz + zcat < $maf | last-split $args | gzip --no-name > ${prefix}.maf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(last-split --version 2>&1 | sed 's/last-split //') + "${task.process}": + last: \$(last-split --version 2>&1 | sed 's/last-split //') END_VERSIONS """ } diff --git a/modules/last/train/functions.nf b/modules/last/train/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/last/train/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = 
args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/last/train/main.nf b/modules/last/train/main.nf index f0b958bc..471db7c1 100644 --- a/modules/last/train/main.nf +++ b/modules/last/train/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LAST_TRAIN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::last=1250' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0" - } else { - container "quay.io/biocontainers/last:1250--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/last:1250--h2e03b76_0' : + 'quay.io/biocontainers/last:1250--h2e03b76_0' }" input: tuple val(meta), path(fastx) @@ -27,20 +16,21 @@ process LAST_TRAIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ INDEX_NAME=\$(basename \$(ls $index/*.des) .des) last-train \\ - $options.args \\ + $args \\ -P $task.cpus \\ ${index}/\$INDEX_NAME \\ $fastx \\ > ${prefix}.\$INDEX_NAME.par cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(lastdb --version | sed 's/lastdb //') + "${task.process}": + last: \$(lastdb --version | sed 's/lastdb //') END_VERSIONS """ } diff --git a/modules/leehom/main.nf b/modules/leehom/main.nf new file mode 100644 index 00000000..b5cb2dcb --- /dev/null +++ b/modules/leehom/main.nf @@ -0,0 +1,74 @@ +def VERSION = '1.2.15' // Version information not provided by tool on CLI + +process LEEHOM { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::leehom=1.2.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/leehom:1.2.15--h29e30f7_1' : + 'quay.io/biocontainers/leehom:1.2.15--h29e30f7_1' }" + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("${prefix}.bam") , optional: true, emit: bam + tuple val(meta), path("${prefix}.fq.gz") , optional: true, emit: fq_pass + tuple val(meta), path("${prefix}.fail.fq.gz") , optional: true, emit: fq_fail + tuple val(meta), path("${prefix}_r1.fq.gz") , optional: true, emit: unmerged_r1_fq_pass + tuple val(meta), path("${prefix}_r1.fail.fq.gz"), optional: true, emit: unmerged_r1_fq_fail + tuple val(meta), path("${prefix}_r2.fq.gz") , optional: true, emit: unmerged_r2_fq_pass + tuple val(meta), path("${prefix}_r2.fail.fq.gz"), optional: true, emit: unmerged_r2_fq_fail + tuple val(meta), path("*.log") , emit: log + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" + + if (reads.toString().endsWith('.bam')) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -o ${prefix}.bam \\ + --log ${prefix}.log \\ + $reads + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ + } else if (meta.single_end) { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 $reads \\ + -fqo $prefix \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ + } else { + """ + leeHom \\ + $args \\ + -t $task.cpus \\ + -fq1 ${reads[0]} \\ + -fq2 ${reads[1]} \\ + -fqo $prefix \\ + --log ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + leehom: $VERSION + END_VERSIONS + """ + } +} diff --git a/modules/leehom/meta.yml b/modules/leehom/meta.yml new file mode 100644 index 00000000..b0d6092a --- /dev/null +++ b/modules/leehom/meta.yml @@ -0,0 +1,77 @@ +name: leehom +description: Bayesian reconstruction of ancient DNA fragments +keywords: + - ancient DNA + - adapter removal + - clipping + - 
trimming + - merging + - collapsing + - preprocessing + - bayesian +tools: + - leehom: + description: Bayesian reconstruction of ancient DNA fragments + homepage: "https://grenaud.github.io/leeHom/" + documentation: "https://github.com/grenaud/leeHom" + tool_dev_url: "https://github.com/grenaud/leeHom" + doi: "10.1093/nar/gku699" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: Unaligned BAM or one or two gzipped FASTQ file(s) + pattern: "*.{bam,fq.gz,fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file + pattern: "*.bam" + - fq_pass: + type: file + description: Trimmed and merged FASTQ + pattern: "*.fq.gz" + - fq_fail: + type: file + description: Failed trimmed and merged FASTQs + pattern: "*.fail.fq.gz" + - unmerged_r1_fq_pass: + type: file + description: Passed unmerged R1 FASTQs + pattern: "*_r1.fq.gz" + - unmerged_r1_fq_fail: + type: file + description: Failed unmerged R1 FASTQs + pattern: "*_r1.fail.fq.gz" + - unmerged_r2_fq_pass: + type: file + description: Passed unmerged R2 FASTQs + pattern: "*_r2.fq.gz" + - unmerged_r2_fq_fail: + type: file + description: Failed unmerged R2 FASTQs + pattern: "*_r2.fail.fq.gz" + - log: + type: file + description: Log file of command + pattern: "*.log" + + +authors: + - "@jfy133" diff --git a/modules/lib/functions.nf b/modules/lib/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lib/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/functions.nf b/modules/lima/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lima/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: 
args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lima/main.nf b/modules/lima/main.nf index 16525953..a662a7bb 100644 --- a/modules/lima/main.nf +++ b/modules/lima/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LIMA { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::lima=2.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/lima:2.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lima:2.2.0--h9ee0642_0' : + 'quay.io/biocontainers/lima:2.2.0--h9ee0642_0' }" input: tuple val(meta), path(ccs) @@ -40,7 +29,8 @@ process LIMA { tuple val(meta), path("*.json") , optional: true, emit: json script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ OUT_EXT="" @@ -62,11 +52,11 @@ process LIMA { $primers \\ $prefix.\$OUT_EXT \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) + "${task.process}": + lima: \$( lima --version | sed 's/lima //g' | sed 's/ (.\\+//g' ) END_VERSIONS """ } diff --git a/modules/lissero/main.nf b/modules/lissero/main.nf new file mode 100644 index 00000000..667697ef --- /dev/null +++ b/modules/lissero/main.nf @@ -0,0 +1,31 @@ +process LISSERO { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::lissero=0.4.9" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/lissero:0.4.9--py_0' : + 'quay.io/biocontainers/lissero:0.4.9--py_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + lissero \\ + $args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + lissero: \$( echo \$(lissero --version 2>&1) | sed 's/^.*LisSero //' ) + END_VERSIONS + """ +} diff --git a/modules/lissero/meta.yml b/modules/lissero/meta.yml new file mode 100644 index 00000000..d4fb38df --- /dev/null +++ b/modules/lissero/meta.yml @@ -0,0 +1,44 @@ +name: lissero +description: Serogrouping Listeria monocytogenes assemblies +keywords: + - fasta + - Listeria monocytogenes + - serogroup +tools: + - lissero: + description: In silico serotyping of Listeria monocytogenes + homepage: https://github.com/MDU-PHL/LisSero/blob/master/README.md + documentation: https://github.com/MDU-PHL/LisSero/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/lissero + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" + diff --git a/modules/lofreq/call/functions.nf b/modules/lofreq/call/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/call/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && 
!System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/call/main.nf b/modules/lofreq/call/main.nf index e77d7a78..d7fd078b 100644 --- a/modules/lofreq/call/main.nf +++ b/modules/lofreq/call/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -27,18 +16,19 @@ process LOFREQ_CALL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/callparallel/functions.nf b/modules/lofreq/callparallel/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/callparallel/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string 
-// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/callparallel/main.nf b/modules/lofreq/callparallel/main.nf index a86748d7..764efcc5 100644 --- a/modules/lofreq/callparallel/main.nf +++ b/modules/lofreq/callparallel/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_CALLPARALLEL { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam), path(bai) @@ -28,19 +17,20 @@ process LOFREQ_CALLPARALLEL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ call-parallel \\ --pp-threads $task.cpus \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.vcf.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/filter/functions.nf b/modules/lofreq/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - 
-// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/filter/main.nf b/modules/lofreq/filter/main.nf index 905a961d..34a5aef8 100644 --- a/modules/lofreq/filter/main.nf +++ b/modules/lofreq/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(vcf) @@ -26,17 +15,18 @@ process LOFREQ_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq \\ filter \\ - $options.args \\ + $args \\ -i $vcf \\ -o ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/lofreq/indelqual/functions.nf b/modules/lofreq/indelqual/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/lofreq/indelqual/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/lofreq/indelqual/main.nf b/modules/lofreq/indelqual/main.nf index b33a1e04..5e5b8f44 100644 --- a/modules/lofreq/indelqual/main.nf +++ b/modules/lofreq/indelqual/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process LOFREQ_INDELQUAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::lofreq=2.1.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4" - } else { - container "quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/lofreq:2.1.5--py38h588ecb2_4' : + 'quay.io/biocontainers/lofreq:2.1.5--py38h588ecb2_4' }" input: tuple val(meta), path(bam) @@ -26,17 +16,18 @@ process LOFREQ_INDELQUAL { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ lofreq indelqual \\ - $options.args \\ + $args \\ -f $fasta \\ -o ${prefix}.bam \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') + "${task.process}": + lofreq: \$(echo \$(lofreq version 2>&1) | sed 's/^version: //; s/ *commit.*\$//') END_VERSIONS """ } diff --git a/modules/macrel/contigs/main.nf b/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..558ef6e8 --- /dev/null +++ b/modules/macrel/contigs/main.nf @@ -0,0 +1,40 @@ +process MACREL_CONTIGS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::macrel=1.1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/macrel:1.1.0--py36hc5360cc_0': + 'quay.io/biocontainers/macrel:1.1.0--py36hc5360cc_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*/*.smorfs.faa.gz") , emit: smorfs + tuple val(meta), path("*/*.all_orfs.faa.gz") , emit: all_orfs + tuple val(meta), path("*/*.prediction.gz") , emit: amp_prediction + tuple val(meta), path("*/*.md") , emit: readme_file + tuple val(meta), path("*/*_log.txt") , emit: log_file + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + macrel contigs \\ + $args \\ + --fasta $fasta \\ + --output ${prefix}/ \\ + --tag ${prefix} \\ + --log-file ${prefix}/${prefix}_log.txt \\ + --threads $task.cpus + + gzip --no-name ${prefix}/*.faa + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + macrel: \$(echo \$(macrel --version | sed 's/macrel //g')) + END_VERSIONS + """ +} diff --git a/modules/macrel/contigs/meta.yml 
b/modules/macrel/contigs/meta.yml new file mode 100644 index 00000000..e0b2fabd --- /dev/null +++ b/modules/macrel/contigs/meta.yml @@ -0,0 +1,61 @@ +name: macrel_contigs +description: A tool that mines antimicrobial peptides (AMPs) from (meta)genomes by predicting peptides from genomes (provided as contigs) and outputs all the predicted anti-microbial peptides found. +keywords: + - AMP + - antimicrobial peptides + - genome mining + - metagenomes + - peptide prediction +tools: + - macrel: + description: A pipeline for AMP (antimicrobial peptide) prediction + homepage: https://macrel.readthedocs.io/en/latest/ + documentation: https://macrel.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/BigDataBiology/macrel + doi: "10.7717/peerj.10555" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: A fasta file with nucleotide sequences. + pattern: "*.{fasta,fa,fna,fasta.gz,fa.gz,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - amp_prediction: + type: file + description: A zipped file, with all predicted amps in a table format. + pattern: "*.prediction.gz" + - smorfs: + type: file + description: A zipped fasta file containing amino acid sequences showing the general gene prediction information in the contigs. + pattern: "*.smorfs.faa.gz" + - all_orfs: + type: file + description: A zipped fasta file containing amino acid sequences showing the general gene prediction information in the contigs. + pattern: "*.all_orfs.faa.gz" + - readme_file: + type: file + description: A readme file containing tool specific information (e.g. citations, details about the output, etc.).
+ pattern: "*.md" + - log_file: + type: file + description: A log file containing the information pertaining to the run. + pattern: "*_log.txt" + +authors: + - "@darcy220606" diff --git a/modules/macs2/callpeak/functions.nf b/modules/macs2/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/macs2/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if 
(ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/macs2/callpeak/main.nf b/modules/macs2/callpeak/main.nf index d54d406d..c5c88f8e 100644 --- a/modules/macs2/callpeak/main.nf +++ b/modules/macs2/callpeak/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MACS2_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::macs2=2.2.7.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h0213d0e_1" - } else { - container "quay.io/biocontainers/macs2:2.2.7.1--py38h0213d0e_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/macs2:2.2.7.1--py38h4a8c8d9_3' : + 'quay.io/biocontainers/macs2:2.2.7.1--py38h4a8c8d9_3' }" input: tuple val(meta), path(ipbam), path(controlbam) @@ -32,13 +21,21 @@ process MACS2_CALLPEAK { tuple val(meta), path("*.bdg") , optional:true, emit: bdg script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def format = meta.single_end ? 'BAM' : 'BAMPE' - def control = controlbam ? "--control $controlbam" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def args_list = args.tokenize() + def format = meta.single_end ? 'BAM' : 'BAMPE' + def control = controlbam ? "--control $controlbam" : '' + if(args_list.contains('--format')){ + def id = args_list.findIndexOf{it=='--format'} + format = args_list[id+1] + args_list.remove(id+1) + args_list.remove(id) + } """ macs2 \\ callpeak \\ - $options.args \\ + ${args_list.join(' ')} \\ --gsize $macs2_gsize \\ --format $format \\ --name $prefix \\ @@ -46,8 +43,8 @@ process MACS2_CALLPEAK { $control cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(macs2 --version | sed -e "s/macs2 //g") + "${task.process}": + macs2: \$(macs2 --version | sed -e "s/macs2 //g") END_VERSIONS """ } diff --git a/modules/macs2/callpeak/meta.yml b/modules/macs2/callpeak/meta.yml new file mode 100644 index 00000000..afb949ec --- /dev/null +++ b/modules/macs2/callpeak/meta.yml @@ -0,0 +1,63 @@ +name: macs2_callpeak +description: Peak calling of enriched genomic regions of ChIP-seq and ATAC-seq experiments +keywords: + - alignment + - atac-seq + - chip-seq + - peak-calling +tools: + - macs2: + description: Model Based Analysis for ChIP-Seq data + homepage: None + documentation: https://docs.csc.fi/apps/macs2/ + tool_dev_url: https://github.com/macs3-project/MACS + doi: "https://doi.org/10.1101/496521" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map 
containing sample information + e.g. [ id:'test', single_end:false ] + - ipbam: + type: file + description: The ChIP-seq treatment file + - controlbam: + type: file + description: The control file + - macs2_gsize: + type: string + description: Effective genome size. It can be 1.0e+9 or 1000000000, or shortcuts:'hs' for human (2.7e9), + 'mm' for mouse (1.87e9), 'ce' for C. elegans (9e7) and 'dm' for fruitfly (1.2e8) + +output: + - versions: + type: file + description: File containing software version + pattern: "versions.yml" + - peak: + type: file + description: BED file containing annotated peaks + pattern: "*.{gappedPeak,narrowPeak}" + - xls: + type: file + description: xls file containing annotated peaks + pattern: "*.xls" + - gapped: + type: file + description: Optional BED file containing gapped peak + pattern: "*.gappedPeak" + - bed: + type: file + description: Optional BED file containing peak summits locations for every peak + pattern: "*.bed" + - bdg: + type: file + description: Optional bedGraph files for input and treatment input samples + pattern: "*.bdg" + +authors: + - "@ntoda03" + - "@JoseEspinosa" + - "@jianhong" diff --git a/modules/malt/build/functions.nf b/modules/malt/build/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/build/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: ''
- options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/build/main.nf b/modules/malt/build/main.nf index 48259a50..d1b0c427 100644 --- a/modules/malt/build/main.nf +++ b/modules/malt/build/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_BUILD { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastas @@ -30,6 +19,7 @@ process MALT_BUILD { path "malt-build.log", emit: log script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_BUILD] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 
@@ -46,13 +36,13 @@ process MALT_BUILD { -s $seq_type \\ $igff \\ -d 'malt_index/' \\ - -t ${task.cpus} \\ - $options.args \\ + -t $task.cpus \\ + $args \\ -mdb ${map_db}/*.db |&tee malt-build.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) + "${task.process}": + malt: \$(malt-build --help |& tail -n 3 | head -n 1 | cut -f 2 -d'(' | cut -f 1 -d ',' | cut -d ' ' -f 2) END_VERSIONS """ } diff --git a/modules/malt/run/functions.nf b/modules/malt/run/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/malt/run/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to 
save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index bc78de8c..8b8f05cc 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALT_RUN { label 'process_high_memory' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::malt=0.53" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0" - } else { - container "quay.io/biocontainers/malt:0.53--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/malt:0.53--hdfd78af_0' : + 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: path fastqs @@ -30,6 +19,7 @@ process MALT_RUN { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' @@ -40,17 +30,17 @@ process MALT_RUN { """ malt-run \\ -J-Xmx${avail_mem}g \\ - -t ${task.cpus} \\ + -t $task.cpus \\ -v \\ -o . \\ - $options.args \\ + $args \\ --inFile ${fastqs.join(' ')} \\ -m $mode \\ --index $index/ |&tee malt-run.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') + "${task.process}": + malt: \$(malt-run --help 2>&1 | grep -o 'version.* ' | cut -f 1 -d ',' | cut -f2 -d ' ') END_VERSIONS """ } diff --git a/modules/maltextract/functions.nf b/modules/maltextract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/maltextract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to 
initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/maltextract/main.nf b/modules/maltextract/main.nf index d909ec96..e3a42016 100644 --- a/modules/maltextract/main.nf +++ b/modules/maltextract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MALTEXTRACT { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::hops=0.35" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1" - } else { - container "quay.io/biocontainers/hops:0.35--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hops:0.35--hdfd78af_1' : + 'quay.io/biocontainers/hops:0.35--hdfd78af_1' }" input: path rma6 @@ -28,6 +17,7 @@ process MALTEXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MaltExtract \\ -Xmx${task.memory.toGiga()}g \\ @@ -36,11 +26,11 @@ process MALTEXTRACT { -t $taxon_list \\ -r $ncbi_dir \\ -o results/ \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') + "${task.process}": + maltextract: \$(MaltExtract --help | head -n 2 | tail -n 1 | sed 's/MaltExtract version//') END_VERSIONS """ } diff --git a/modules/manta/germline/functions.nf b/modules/manta/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index f957a7ec..2a8c0acc 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), 
meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_GERMLINE { mv manta/results/variants/diploidSV.vcf.gz.tbi \ ${prefix}.diploid_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/somatic/functions.nf b/modules/manta/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index f912d478..1d62635b 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) @@ -37,7 +26,8 @@ process MANTA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ @@ -60,8 +50,8 @@ process MANTA_SOMATIC { mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/manta/tumoronly/functions.nf b/modules/manta/tumoronly/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/manta/tumoronly/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index f20e8128..63f7a840 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MANTA_TUMORONLY { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::manta=1.6.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1" - } else { - container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1' : + 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: tuple val(meta), path(input), path(input_index) @@ -35,7 +24,8 @@ process MANTA_TUMORONLY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def options_manta = target_bed ? "--exome --callRegions $target_bed" : "" """ configManta.py \ @@ -59,10 +49,9 @@ process MANTA_TUMORONLY { mv manta/results/variants/tumorSV.vcf.gz.tbi \ ${prefix}.tumor_sv.vcf.gz.tbi - cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configManta.py --version ) + "${task.process}": + manta: \$( configManta.py --version ) END_VERSIONS """ } diff --git a/modules/mapdamage2/main.nf b/modules/mapdamage2/main.nf new file mode 100644 index 00000000..e3668fda --- /dev/null +++ b/modules/mapdamage2/main.nf @@ -0,0 +1,49 @@ +process MAPDAMAGE2 { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::mapdamage2=2.2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mapdamage2:2.2.1--pyr40_0' : + 'quay.io/biocontainers/mapdamage2:2.2.1--pyr40_0' }" + + input: + tuple val(meta), path(bam) + path(fasta) + + output: + tuple val(meta), path("results_*/Runtime_log.txt") ,emit: runtime_log + tuple val(meta), path("results_*/Fragmisincorporation_plot.pdf"), optional: true ,emit: fragmisincorporation_plot + tuple val(meta), path("results_*/Length_plot.pdf"), optional: true ,emit: length_plot + tuple val(meta), path("results_*/misincorporation.txt"), optional: true ,emit: misincorporation + tuple val(meta), path("results_*/lgdistribution.txt"), optional: true ,emit: lgdistribution + tuple val(meta), path("results_*/dnacomp.txt"), optional: true ,emit: dnacomp + tuple val(meta), path("results_*/Stats_out_MCMC_hist.pdf"), optional: true ,emit: stats_out_mcmc_hist + tuple val(meta), path("results_*/Stats_out_MCMC_iter.csv"), optional: true ,emit: stats_out_mcmc_iter + tuple val(meta), path("results_*/Stats_out_MCMC_trace.pdf"), optional: true ,emit: stats_out_mcmc_trace + tuple val(meta), path("results_*/Stats_out_MCMC_iter_summ_stat.csv"), optional: true ,emit: stats_out_mcmc_iter_summ_stat + tuple val(meta), path("results_*/Stats_out_MCMC_post_pred.pdf"), optional: true ,emit: stats_out_mcmc_post_pred + tuple val(meta), path("results_*/Stats_out_MCMC_correct_prob.csv"), optional: true ,emit: stats_out_mcmc_correct_prob + tuple val(meta), path("results_*/dnacomp_genome.csv"), optional: true ,emit: dnacomp_genome + tuple val(meta), path("results_*/rescaled.bam"), optional: true ,emit: rescaled + tuple val(meta), path("results_*/5pCtoT_freq.txt"), optional: true ,emit: pctot_freq + tuple val(meta), path("results_*/3pGtoA_freq.txt"), optional: true ,emit: pgtoa_freq + tuple val(meta), path("results_*/*.fasta"), optional: true ,emit: fasta + tuple val(meta), path("*/"), optional: true ,emit: folder + path "versions.yml",emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = 
task.ext.prefix ?: "${meta.id}" + """ + mapDamage \\ + $args \\ + -i $bam \\ + -r $fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + mapdamage2: \$(echo \$(mapDamage --version)) + END_VERSIONS + """ +} diff --git a/modules/mapdamage2/meta.yml b/modules/mapdamage2/meta.yml new file mode 100644 index 00000000..e511a0a6 --- /dev/null +++ b/modules/mapdamage2/meta.yml @@ -0,0 +1,114 @@ +name: mapdamage2 + +description: Computational framework for tracking and quantifying DNA damage patterns among ancient DNA sequencing reads generated by Next-Generation Sequencing platforms. +keywords: + - ancient DNA + - DNA damage + - NGS + - damage patterns + - bam +tools: + - mapdamage2: + description: Tracking and quantifying damage patterns in ancient DNA sequences + homepage: http://ginolhac.github.io/mapDamage/ + documentation: https://ginolhac.github.io/mapDamage/ + tool_dev_url: https://github.com/ginolhac/mapDamage + doi: "10.1093/bioinformatics/btt193" + licence: ['MIT'] + +input: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.{bam}" + - fasta: + type: file + description: Fasta file, the reference the input BAM was mapped against + pattern: "*.{fasta}" + +output: + - meta: + type: map + description: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - runtime_log: + type: file + description: Log file with a summary of command lines used and timestamps. + pattern: "Runtime_log.txt" + - fragmisincorporation_plot: + type: file + description: A pdf file that displays both fragmentation and misincorporation patterns. 
+ pattern: "Fragmisincorporation_plot.pdf" + - length_plot: + type: file + description: A pdf file that displays length distribution of singleton reads per strand and cumulative frequencies of C->T at 5'-end and G->A at 3'-end are also displayed per strand. + pattern: "Length_plot.pdf" + - misincorporation: + type: file + description: Contains a table with occurrences for each type of mutations and relative positions from the reads ends. + pattern: "misincorporation.txt" + - pctot_freq: + type: file + description: Contains frequencies of Cytosine to Thymine mutations per position from the 5'-ends. + pattern: "5pCtoT_freq.txt" + - pgtoa_freq: + type: file + description: Contains frequencies of Guanine to Adenine mutations per position from the 3'-ends. + pattern: "3pGtoA_freq.txt" + - dnacomp: + type: file + description: Contains a table of the reference genome base composition per position, inside reads and adjacent regions. + pattern: "dnacomp.txt" + - lgdistribution: + type: file + description: Contains a table with read length distributions per strand. + pattern: "lgdistribution.txt" + - stats_out_mcmc_hist: + type: file + description: A MCMC histogram for the damage parameters and log likelihood. + pattern: "Stats_out_MCMC_hist.pdf" + - stats_out_mcmc_iter: + type: file + description: Values for the damage parameters and log likelihood in each MCMC iteration. + pattern: "Stats_out_MCMC_iter.csv" + - stats_out_mcmc_trace: + type: file + description: A MCMC trace plot for the damage parameters and log likelihood. + pattern: "Stats_out_MCMC_trace.pdf" + - stats_out_mcmc_iter_summ_stat: + type: file + description: Summary statistics for the damage parameters estimated posterior distributions. + pattern: "Stats_out_MCMC_iter_summ_stat.csv" + - stats_out_mcmc_post_pred: + type: file + description: Empirical misincorporation frequency and posterior predictive intervals from the fitted model. 
+    pattern: "Stats_out_MCMC_post_pred.pdf" + - stats_out_mcmc_correct_prob: + type: file + description: Position specific probability of a C->T and G->A misincorporation is due to damage. + pattern: "Stats_out_MCMC_correct_prob.csv" + - dnacomp_genome: + type: file + description: Contains the global reference genome base composition (computed by seqtk). + pattern: "dnacomp_genome.csv" + - rescaled: + type: file + description: Rescaled BAM file, where likely post-mortem damaged bases have downscaled quality scores. + pattern: "*.{bam}" + - fasta: + type: file + description: Alignments in a FASTA file, only if flagged by -d. + pattern: "*.{fasta}" + - folder: + type: folder + description: Folder created when --plot-only, --rescale and --stats-only flags are passed. + pattern: "*/" + +authors: +- "@darcy220606" diff --git a/modules/mash/sketch/functions.nf b/modules/mash/sketch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mash/sketch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mash/sketch/main.nf b/modules/mash/sketch/main.nf index f434a5f1..d93641f7 100644 --- a/modules/mash/sketch/main.nf +++ b/modules/mash/sketch/main.nf @@ -1,20 +1,10 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MASH_SKETCH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::mash=2.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1" - } else { - container "quay.io/biocontainers/mash:2.3--he348c14_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mash:2.3--he348c14_1' : + 'quay.io/biocontainers/mash:2.3--he348c14_1' }" input: tuple val(meta), path(reads) @@ -25,19 +15,20 @@ process MASH_SKETCH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mash \\ sketch \\ - $options.args \\ + $args \\ -p $task.cpus \\ -o ${prefix} \\ -r $reads \\ 2> ${prefix}.mash_stats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mash --version 2>&1) + "${task.process}": + mash: \$(mash --version 2>&1) END_VERSIONS """ } diff --git a/modules/mashtree/functions.nf b/modules/mashtree/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mashtree/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mashtree/main.nf b/modules/mashtree/main.nf index db0b14f5..5da2f805 100644 --- a/modules/mashtree/main.nf +++ b/modules/mashtree/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MASHTREE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mashtree=1.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0" - } else { - container "quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mashtree:1.2.0--pl526h516909a_0' : + 'quay.io/biocontainers/mashtree:1.2.0--pl526h516909a_0' }" input: tuple val(meta), path(seqs) @@ -27,18 +16,19 @@ process MASHTREE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mashtree \\ - $options.args \\ + $args \\ --numcpus $task.cpus \\ --outmatrix ${prefix}.tsv \\ --outtree ${prefix}.dnd \\ $seqs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' ) + "${task.process}": + mashtree: \$( echo \$( mashtree --version 2>&1 ) | sed 's/^.*Mashtree //' ) END_VERSIONS """ } diff --git a/modules/maxbin2/functions.nf b/modules/maxbin2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/maxbin2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map 
args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf index bcfa9590..4d384391 100644 --- a/modules/maxbin2/main.nf +++ b/modules/maxbin2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MAXBIN2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::maxbin2=2.2.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2" - } else { - container "quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/maxbin2:2.2.7--he1b5a44_2' : + 'quay.io/biocontainers/maxbin2:2.2.7--he1b5a44_2' }" input: tuple val(meta), path(contigs), path(reads), path(abund) @@ -33,20 +22,21 @@ process MAXBIN2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def associate_files = reads ? 
"-reads $reads" : "-abund $abund" """ run_MaxBin.pl \\ -contig $contigs \\ $associate_files \\ -thread $task.cpus \\ - $options.args \\ + $args \\ -out $prefix gzip *.fasta *.noclass *.tooshort *log *.marker cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": maxbin2: \$( run_MaxBin.pl -v | head -n 1 | sed 's/MaxBin //' ) END_VERSIONS """ diff --git a/modules/medaka/main.nf b/modules/medaka/main.nf new file mode 100644 index 00000000..761b1c34 --- /dev/null +++ b/modules/medaka/main.nf @@ -0,0 +1,37 @@ +process MEDAKA { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::medaka=1.4.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/medaka:1.4.4--py38h130def0_0' : + 'quay.io/biocontainers/medaka:1.4.4--py38h130def0_0' }" + + input: + tuple val(meta), path(reads), path(assembly) + + output: + tuple val(meta), path("*.fa.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + medaka_consensus \\ + -t $task.cpus \\ + $args \\ + -i $reads \\ + -d $assembly \\ + -o ./ + + mv consensus.fasta ${prefix}.fa + + gzip -n ${prefix}.fa + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + medaka: \$( medaka --version 2>&1 | sed 's/medaka //g' ) + END_VERSIONS + """ +} diff --git a/modules/medaka/meta.yml b/modules/medaka/meta.yml new file mode 100644 index 00000000..d194464f --- /dev/null +++ b/modules/medaka/meta.yml @@ -0,0 +1,47 @@ +name: medaka +description: A tool to create consensus sequences and variant calls from nanopore sequencing data +keywords: + - assembly + - polishing + - nanopore +tools: + - medaka: + description: Neural network sequence error correction. 
+ homepage: https://nanoporetech.github.io/medaka/index.html + documentation: https://nanoporetech.github.io/medaka/index.html + tool_dev_url: https://github.com/nanoporetech/medaka + doi: "" + licence: ['Mozilla Public License 2.0'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input nanopore fasta/FastQ files + pattern: "*.{fasta,fa,fastq,fastq.gz,fq,fq.gz}" + - assembly: + type: file + description: Genome assembly + pattern: "*.{fasta,fa}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - assembly: + type: file + description: Polished genome assembly + pattern: "*.fa.gz" + +authors: + - "@avantonder" diff --git a/modules/megahit/functions.nf b/modules/megahit/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/megahit/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - 
return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/megahit/main.nf b/modules/megahit/main.nf index 8c8a5555..7b511883 100644 --- a/modules/megahit/main.nf +++ b/modules/megahit/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MEGAHIT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::megahit=1.2.9 conda-forge::pigz=2.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } else { - container "quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' : + 'quay.io/biocontainers/mulled-v2-0f92c152b180c7cd39d9b0e6822f8c89ccb59c99:8ec213d21e5d03f9db54898a2baeaf8ec729b447-0' }" input: tuple val(meta), path(reads) @@ -30,25 +19,27 @@ process MEGAHIT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ megahit \\ -r ${reads} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } else { @@ -57,19 +48,19 @@ process MEGAHIT { -1 ${reads[0]} \\ -2 ${reads[1]} \\ -t $task.cpus \\ - $options.args \\ + $args \\ --out-prefix $prefix pigz \\ --no-name \\ -p $task.cpus \\ - $options.args2 \\ + $args2 \\ megahit_out/*.fa \\ megahit_out/intermediate_contigs/*.fa cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') + "${task.process}": + megahit: \$(echo \$(megahit -v 2>&1) | sed 's/MEGAHIT v//') END_VERSIONS """ } diff --git a/modules/meningotype/main.nf b/modules/meningotype/main.nf new file mode 100644 index 00000000..c3b65b9d --- /dev/null +++ b/modules/meningotype/main.nf @@ -0,0 +1,31 @@ +process MENINGOTYPE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::meningotype=0.8.5" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/meningotype:0.8.5--pyhdfd78af_0' : + 'quay.io/biocontainers/meningotype:0.8.5--pyhdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + meningotype \\ + $args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + meningotype: \$( echo \$(meningotype --version 2>&1) | sed 's/^.*meningotype v//' ) + END_VERSIONS + """ +} diff --git a/modules/meningotype/meta.yml b/modules/meningotype/meta.yml new file mode 100644 index 00000000..07c2ff5e --- /dev/null +++ b/modules/meningotype/meta.yml @@ -0,0 +1,43 @@ +name: meningotype +description: Serotyping of Neisseria meningitidis assemblies +keywords: + - fasta + - Neisseria meningitidis + - serotype +tools: + - meningotype: + description: In silico serotyping and finetyping (porA and fetA) of Neisseria meningitidis + homepage: https://github.com/MDU-PHL/meningotype + documentation: https://github.com/MDU-PHL/meningotype + tool_dev_url: https://github.com/MDU-PHL/meningotype + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..4a5869b6 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,35 @@ +process METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" + + input: + tuple val(meta), path(bam), path(bai) + + output: + tuple val(meta), path("*.txt.gz"), emit: depth + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + export OMP_NUM_THREADS=$task.cpus + + jgi_summarize_bam_contig_depths \\ + --outputDepth ${prefix}.txt \\ + $args \\ + $bam + + bgzip --threads $task.cpus ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/jgisummarizebamcontigdepths/meta.yml b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml new file mode 100644 index 00000000..351a4701 --- /dev/null +++ b/modules/metabat2/jgisummarizebamcontigdepths/meta.yml @@ -0,0 +1,50 @@ +name: metabat2_jgisummarizebamcontigdepths +description: Depth computation per contig step of metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: 
Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Sorted BAM file of reads aligned on the assembled contigs + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bam.bai" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - depth: + type: file + description: Text file listing the coverage per contig + pattern: ".txt.gz" + +authors: + - "@maxibor" diff --git a/modules/metabat2/metabat2/main.nf b/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..a8af0ae9 --- /dev/null +++ b/modules/metabat2/metabat2/main.nf @@ -0,0 +1,49 @@ +process METABAT2_METABAT2 { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::metabat2=2.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/metabat2:2.15--h986a166_1' : + 'quay.io/biocontainers/metabat2:2.15--h986a166_1' }" + + input: + tuple val(meta), path(fasta), path(depth) + + output: + tuple val(meta), path("*.tooShort.fa.gz") , optional:true , emit: tooshort + tuple val(meta), path("*.lowDepth.fa.gz") , optional:true , emit: lowdepth + tuple val(meta), path("*.unbinned.fa.gz") , optional:true , emit: unbinned + tuple val(meta), path("*.tsv.gz") , optional:true , emit: membership + tuple val(meta), path("bins/*.fa.gz") , optional:true , emit: fasta + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def decompress_depth = depth ? "gzip -d -f $depth" : "" + def depth_file = depth ? "-a ${depth.baseName}" : "" + """ + $decompress_depth + + metabat2 \\ + $args \\ + -i $fasta \\ + $depth_file \\ + -t $task.cpus \\ + --saveCls \\ + -o metabat2/${prefix} + + mv metabat2/${prefix} ${prefix}.tsv + mv metabat2 bins + + gzip ${prefix}.tsv + find ./bins/ -name "*.fa" -type f | xargs -t -n 1 bgzip -@ ${task.cpus} + find ./bins/ -name "*[lowDepth,tooShort,unbinned].fa.gz" -type f -exec mv {} . 
\\; + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + metabat2: \$( metabat2 --help 2>&1 | head -n 2 | tail -n 1| sed 's/.*\\:\\([0-9]*\\.[0-9]*\\).*/\\1/' ) + END_VERSIONS + """ +} diff --git a/modules/metabat2/metabat2/meta.yml b/modules/metabat2/metabat2/meta.yml new file mode 100644 index 00000000..0ec07b02 --- /dev/null +++ b/modules/metabat2/metabat2/meta.yml @@ -0,0 +1,69 @@ +name: metabat2_metabat2 +keywords: + - sort + - binning + - depth + - bam + - coverage + - de novo assembly +tools: + - metabat2: + description: Metagenome binning + homepage: https://bitbucket.org/berkeleylab/metabat/src/master/ + documentation: https://bitbucket.org/berkeleylab/metabat/src/master/ + tool_dev_url: https://bitbucket.org/berkeleylab/metabat/src/master/ + doi: "10.7717/peerj.7359" + licence: ['BSD-3-clause-LBNL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta file of the assembled contigs + pattern: "*.{fa,fas,fasta,fna,fa.gz,fas.gz,fasta.gz,fna.gz}" + - depth: + type: file + description: | + Optional text file listing the coverage per contig pre-generated + by metabat2_jgisummarizebamcontigdepths + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fasta: + type: file + description: Bins created from assembled contigs in fasta file + pattern: "*.fa.gz" + - tooshort: + type: file + description: Contigs that did not pass length filtering + pattern: "*.tooShort.fa.gz" + - lowdepth: + type: file + description: Contigs that did not have sufficient depth for binning + pattern: "*.lowDepth.fa.gz" + - unbinned: + type: file + description: Contigs that pass length and depth filtering but could not be binned + pattern: "*.unbinned.fa.gz" + - membership: + type: file + description: cluster memberships as a matrix format. + pattern: "*.tsv.gz" + + +authors: + - "@maxibor" + - "@jfy133" diff --git a/modules/metaphlan3/functions.nf b/modules/metaphlan3/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/metaphlan3/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = 
path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index c5157b66..64965af3 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METAPHLAN3 { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::metaphlan=3.0.12' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0" - } else { - container "quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/metaphlan:3.0.12--pyhb7b1952_0' : + 'quay.io/biocontainers/metaphlan:3.0.12--pyhb7b1952_0' }" input: tuple val(meta), path(input) @@ -29,7 +18,8 @@ process METAPHLAN3 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? 
'' : "--bowtie2out ${prefix}.bowtie2out.txt" @@ -39,14 +29,14 @@ process METAPHLAN3 { --nproc $task.cpus \\ $input_type \\ $input_data \\ - $options.args \\ + $args \\ $bowtie2_out \\ --bowtie2db ${metaphlan_db} \\ --biom ${prefix}.biom \\ --output_file ${prefix}_profile.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(metaphlan --version 2>&1 | awk '{print \$3}') + "${task.process}": + metaphlan3: \$(metaphlan --version 2>&1 | awk '{print \$3}') END_VERSIONS """ } diff --git a/modules/methyldackel/extract/functions.nf b/modules/methyldackel/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/extract/main.nf b/modules/methyldackel/extract/main.nf index 94e4b379..a39c0305 100644 --- a/modules/methyldackel/extract/main.nf +++ b/modules/methyldackel/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_EXTRACT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,15 +17,16 @@ process METHYLDACKEL_EXTRACT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ MethylDackel extract \\ - $options.args \\ + $args \\ $fasta \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/methyldackel/mbias/functions.nf b/modules/methyldackel/mbias/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/methyldackel/mbias/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/methyldackel/mbias/main.nf b/modules/methyldackel/mbias/main.nf index c8fd2fa2..021f76f1 100644 --- a/modules/methyldackel/mbias/main.nf +++ b/modules/methyldackel/mbias/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process METHYLDACKEL_MBIAS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::methyldackel=0.6.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0" - } else { - container "quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/methyldackel:0.6.0--h22771d5_0' : + 'quay.io/biocontainers/methyldackel:0.6.0--h22771d5_0' }" input: tuple val(meta), path(bam), path(bai) @@ -28,10 +17,11 @@ process METHYLDACKEL_MBIAS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ MethylDackel mbias \\ - $options.args \\ + $args \\ $fasta \\ $bam \\ $prefix \\ @@ -39,8 +29,8 @@ process METHYLDACKEL_MBIAS { > ${prefix}.mbias.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") + "${task.process}": + methyldackel: \$(MethylDackel --version 2>&1 | cut -f1 -d" ") END_VERSIONS """ } diff --git a/modules/minia/functions.nf b/modules/minia/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minia/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minia/main.nf b/modules/minia/main.nf index 140ef9e7..ceff67c5 100644 --- a/modules/minia/main.nf +++ b/modules/minia/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIA { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::minia=3.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0" - } else { - container "quay.io/biocontainers/minia:3.2.4--he513fc3_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minia:3.2.4--he513fc3_0' : + 'quay.io/biocontainers/minia:3.2.4--he513fc3_0' }" input: tuple val(meta), path(reads) @@ -28,19 +17,20 @@ process MINIA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def read_list = reads.join(",") """ echo "${read_list}" | sed 's/,/\\n/g' > input_files.txt minia \\ - $options.args \\ + $args \\ -nb-cores $task.cpus \\ -in input_files.txt \\ -out $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') + "${task.process}": + minia: \$(echo \$(minia --version 2>&1 | grep Minia) | sed 's/^.*Minia version //;') END_VERSIONS """ } diff --git a/modules/miniasm/main.nf b/modules/miniasm/main.nf new file mode 100644 index 00000000..b0db6925 --- /dev/null +++ b/modules/miniasm/main.nf @@ -0,0 +1,38 @@ +process MINIASM { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::miniasm=0.3_r179" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/miniasm:0.3_r179--h5bf99c6_2' : + 'quay.io/biocontainers/miniasm:0.3_r179--h5bf99c6_2' }" + + input: + tuple val(meta), path(reads), path(paf) + + output: + tuple val(meta), path("*.gfa.gz") , emit: gfa + tuple val(meta), path("*.fasta.gz"), emit: assembly + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + miniasm \\ + $args \\ + -f $reads \\ + $paf > \\ + ${prefix}.gfa + + awk '/^S/{print ">"\$2"\\n"\$3}' "${prefix}.gfa" | fold > ${prefix}.fasta + + gzip -n ${prefix}.gfa + gzip -n ${prefix}.fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + miniasm: \$( miniasm -V 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/miniasm/meta.yml b/modules/miniasm/meta.yml new file mode 100644 index 00000000..e8aedb9a --- /dev/null +++ b/modules/miniasm/meta.yml @@ -0,0 +1,51 @@ +name: miniasm +description: A very fast OLC-based de novo assembler for noisy long reads +keywords: + - assembly + - pacbio + - nanopore +tools: + - miniasm: + description: Ultrafast de novo assembly for long noisy reads (though having no consensus step) + homepage: https://github.com/lh3/miniasm + documentation: https://github.com/lh3/miniasm + tool_dev_url: https://github.com/lh3/miniasm + doi: "10.1093/bioinformatics/btw152" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of input PacBio/ONT FastQ files. + pattern: "*.{fastq,fastq.gz,fq,fq.gz}" + - paf: + type: file + description: Alignment in PAF format + pattern: "*{.paf,.paf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gfa: + type: file + description: Assembly graph + pattern: "*.gfa.gz" + - assembly: + type: file + description: Genome assembly + pattern: "*.fasta.gz" + +authors: + - "@avantonder" diff --git a/modules/minimap2/align/functions.nf b/modules/minimap2/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running 
from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index 215e4fb5..500250e9 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_ALIGN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process MINIMAP2_ALIGN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" """ minimap2 \\ - $options.args \\ + $args \\ -t $task.cpus \\ $reference \\ $input_reads \\ > ${prefix}.paf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/minimap2/index/functions.nf b/modules/minimap2/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/minimap2/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = 
[:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/minimap2/index/main.nf b/modules/minimap2/index/main.nf index b154a649..10cdd142 100644 --- a/modules/minimap2/index/main.nf +++ b/modules/minimap2/index/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MINIMAP2_INDEX { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:['']) } conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0" - } else { - container "quay.io/biocontainers/minimap2:2.21--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: path fasta @@ -25,16 +14,17 @@ process MINIMAP2_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ minimap2 \\ -t $task.cpus \\ -d ${fasta.baseName}.mmi \\ - $options.args \\ + $args \\ $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(minimap2 --version 2>&1) + "${task.process}": + minimap2: \$(minimap2 --version 2>&1) END_VERSIONS """ } diff --git a/modules/mlst/functions.nf b/modules/mlst/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mlst/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// 
-// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mlst/main.nf b/modules/mlst/main.nf index faac9871..b2983b82 100644 --- a/modules/mlst/main.nf +++ b/modules/mlst/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MLST { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mlst=2.19.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/mlst:2.19.0--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mlst:2.19.0--hdfd78af_1' : + 'quay.io/biocontainers/mlst:2.19.0--hdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,7 +15,8 @@ process MLST { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mlst \\ --threads $task.cpus \\ @@ -34,8 +24,8 @@ process MLST { > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) + "${task.process}": + mlst: \$( echo \$(mlst --version 2>&1) | sed 's/mlst //' ) END_VERSIONS """ diff --git a/modules/mosdepth/functions.nf b/modules/mosdepth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mosdepth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = 
args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mosdepth/main.nf b/modules/mosdepth/main.nf index 8fe3cfee..d2669b7e 100644 --- a/modules/mosdepth/main.nf +++ b/modules/mosdepth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MOSDEPTH { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::mosdepth=0.3.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0" - } else { - container "quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mosdepth:0.3.2--h01d7912_0' : + 'quay.io/biocontainers/mosdepth:0.3.2--h01d7912_0' }" input: tuple val(meta), path(bam), path(bai) @@ -34,17 +23,18 @@ process MOSDEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def interval = window_size ? 
"--by ${window_size}" : "--by ${bed}" """ mosdepth \\ $interval \\ - $options.args \\ + $args \\ $prefix \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') + "${task.process}": + mosdepth: \$(mosdepth --version 2>&1 | sed 's/^.*mosdepth //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/msisensor/msi/functions.nf b/modules/msisensor/msi/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/msi/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/msi/main.nf b/modules/msisensor/msi/main.nf index bd5a0a0e..398b34a6 100644 --- a/modules/msisensor/msi/main.nf +++ b/modules/msisensor/msi/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_MSI { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(normal_bam), path(normal_bai), path(tumor_bam), path(tumor_bai), val(metascan), path(homopolymers) @@ -29,7 +18,8 @@ process MSISENSOR_MSI { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ msi \\ @@ -37,11 +27,11 @@ process MSISENSOR_MSI { -n $normal_bam \\ -t $tumor_bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/msisensor/scan/functions.nf b/modules/msisensor/scan/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/msisensor/scan/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values 
and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/msisensor/scan/main.nf b/modules/msisensor/scan/main.nf index ebd8785a..223b4f44 100644 --- a/modules/msisensor/scan/main.nf +++ b/modules/msisensor/scan/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MSISENSOR_SCAN { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::msisensor=0.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2" - } else { - container "quay.io/biocontainers/msisensor:0.5--hb3646a4_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/msisensor:0.5--hb3646a4_2' : + 'quay.io/biocontainers/msisensor:0.5--hb3646a4_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process MSISENSOR_SCAN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ msisensor \\ scan \\ -d $fasta \\ -o ${prefix}.msisensor_scan.tab \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') + "${task.process}": + msisensor: \$(msisensor 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') END_VERSIONS """ } diff --git a/modules/mtnucratio/functions.nf b/modules/mtnucratio/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/mtnucratio/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/mtnucratio/main.nf b/modules/mtnucratio/main.nf index 28d08a13..83d6ea2b 100644 --- a/modules/mtnucratio/main.nf +++ b/modules/mtnucratio/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MTNUCRATIO { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::mtnucratio=0.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2" - } else { - container "quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mtnucratio:0.7--hdfd78af_2' : + 'quay.io/biocontainers/mtnucratio:0.7--hdfd78af_2' }" input: tuple val(meta), path(bam) @@ -27,17 +17,18 @@ process MTNUCRATIO { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ mtnucratio \\ - $options.args \\ + $args \\ $bam \\ $mt_id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') + "${task.process}": + mtnucratio: \$(echo \$(mtnucratio --version 2>&1) | head -n1 | sed 's/Version: //') END_VERSIONS """ } diff --git a/modules/multiqc/functions.nf b/modules/multiqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/multiqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 0861aa59..3dceb162 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MULTIQC { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::multiqc=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" input: path multiqc_files @@ -27,12 +16,13 @@ process MULTIQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - multiqc -f $options.args . + multiqc -f $args . 
cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) END_VERSIONS """ } diff --git a/modules/mummer/main.nf b/modules/mummer/main.nf new file mode 100644 index 00000000..39ad3e8b --- /dev/null +++ b/modules/mummer/main.nf @@ -0,0 +1,45 @@ +def VERSION = '3.23' // Version information not provided by tool on CLI + +process MUMMER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::mummer=3.23" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + + def is_compressed_query = query.getName().endsWith(".gz") ? 
true : false + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + mummer \\ + $args \\ + $fasta_name_ref \\ + $fasta_name_query \\ + > ${prefix}.coords + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + mummer: $VERSION + END_VERSIONS + """ +} diff --git a/modules/mummer/meta.yml b/modules/mummer/meta.yml new file mode 100644 index 00000000..5f7a983c --- /dev/null +++ b/modules/mummer/meta.yml @@ -0,0 +1,48 @@ +name: mummer +description: MUMmer is a system for rapidly aligning entire genomes +keywords: + - align + - genome + - fasta +tools: + - mummer: + description: MUMmer is a system for rapidly aligning entire genomes + homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: https://doi.org/10.1186/gb-2004-5-2-r12 + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - coords: + type: file + description: File containing coordinates of matches between reference and query sequence + pattern: "*.coords" + +authors: + - "@mjcipriano" + - "@sateeshperi" diff --git a/modules/muscle/functions.nf b/modules/muscle/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/muscle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/muscle/main.nf b/modules/muscle/main.nf index 6ffb97ac..6d549aaa 100644 --- a/modules/muscle/main.nf +++ b/modules/muscle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process MUSCLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::muscle=3.8.1551" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6" - } else { - container "quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/muscle:3.8.1551--h7d875b9_6' : + 'quay.io/biocontainers/muscle:3.8.1551--h7d875b9_6' }" input: tuple val(meta), path(fasta) @@ -33,18 +22,18 @@ process MUSCLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def fasta_out = options.args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' - def clw_out = options.args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' - def msf_out = options.args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' - def phys_out = options.args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' - def phyi_out = options.args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' - def html_out = options.args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' - def tree_out = options.args.contains('-maketree') ? "-out ${prefix}_muscle_msa.tree" : '' - + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def fasta_out = args.contains('-fasta') ? "-fastaout ${prefix}_muscle_msa.afa" : '' + def clw_out = args.contains('-clw') ? "-clwout ${prefix}_muscle_msa.clw" : '' + def msf_out = args.contains('-msf') ? "-msfout ${prefix}_muscle_msa.msf" : '' + def phys_out = args.contains('-phys') ? "-physout ${prefix}_muscle_msa.phys" : '' + def phyi_out = args.contains('-phyi') ? "-phyiout ${prefix}_muscle_msa.phyi" : '' + def html_out = args.contains('-html') ? "-htmlout ${prefix}_muscle_msa.html" : '' + def tree_out = args.contains('-maketree') ? 
"-out ${prefix}_muscle_msa.tree" : '' """ muscle \\ - $options.args \\ + $args \\ -in $fasta \\ $fasta_out \\ $clw_out \\ @@ -55,8 +44,8 @@ process MUSCLE { $tree_out \\ -loga muscle_msa.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') + "${task.process}": + muscle: \$(muscle -version | sed 's/^MUSCLE v//; s/by.*\$//') END_VERSIONS """ } diff --git a/modules/nanolyse/functions.nf b/modules/nanolyse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanolyse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanolyse/main.nf b/modules/nanolyse/main.nf index 271592f7..0ad0f799 100644 --- a/modules/nanolyse/main.nf +++ b/modules/nanolyse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOLYSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::nanolyse=1.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0" - } else { - container "quay.io/biocontainers/nanolyse:1.2.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanolyse:1.2.0--py_0' : + 'quay.io/biocontainers/nanolyse:1.2.0--py_0' }" input: tuple val(meta), path(fastq) @@ -28,14 +17,15 @@ process NANOLYSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ gunzip -c $fastq | NanoLyse -r $fasta | gzip > ${prefix}.fastq.gz mv NanoLyse.log ${prefix}.nanolyse.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") + "${task.process}": + nanolyse: \$(NanoLyse --version 2>&1 | sed -e "s/NanoLyse //g") END_VERSIONS """ } diff --git a/modules/nanoplot/functions.nf b/modules/nanoplot/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/nanoplot/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = 
args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nanoplot/main.nf b/modules/nanoplot/main.nf index 16e2248c..36577d8a 100644 --- a/modules/nanoplot/main.nf +++ b/modules/nanoplot/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NANOPLOT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::nanoplot=1.38.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nanoplot:1.38.0--pyhdfd78af_0' : + 'quay.io/biocontainers/nanoplot:1.38.0--pyhdfd78af_0' }" input: tuple val(meta), path(ontfile) @@ -29,16 +18,17 @@ process NANOPLOT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def input_file = ("$ontfile".endsWith(".fastq.gz")) ? "--fastq ${ontfile}" : ("$ontfile".endsWith(".txt")) ? 
"--summary ${ontfile}" : '' """ NanoPlot \\ - $options.args \\ + $args \\ -t $task.cpus \\ $input_file cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') + "${task.process}": + nanoplot: \$(echo \$(NanoPlot --version 2>&1) | sed 's/^.*NanoPlot //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/ncbigenomedownload/main.nf b/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..9897c861 --- /dev/null +++ b/modules/ncbigenomedownload/main.nf @@ -0,0 +1,46 @@ +process NCBIGENOMEDOWNLOAD { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::ncbi-genome-download=0.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ncbi-genome-download:0.3.0--pyh864c0ab_1' : + 'quay.io/biocontainers/ncbi-genome-download:0.3.0--pyh864c0ab_1' }" + + input: + val meta + path accessions + + output: + tuple val(meta), path("*_genomic.gbff.gz") , emit: gbk , optional: true + tuple val(meta), path("*_genomic.fna.gz") , emit: fna , optional: true + tuple val(meta), path("*_rm.out.gz") , emit: rm , optional: true + tuple val(meta), path("*_feature_table.txt.gz") , emit: features, optional: true + tuple val(meta), path("*_genomic.gff.gz") , emit: gff , optional: true + tuple val(meta), path("*_protein.faa.gz") , emit: faa , optional: true + tuple val(meta), path("*_protein.gpff.gz") , emit: gpff , optional: true + tuple val(meta), path("*_wgsmaster.gbff.gz") , emit: wgs_gbk , optional: true + tuple val(meta), path("*_cds_from_genomic.fna.gz"), emit: cds , optional: true + tuple val(meta), path("*_rna.fna.gz") , emit: rna , optional: true + tuple val(meta), path("*_rna_from_genomic.fna.gz"), emit: rna_fna , optional: true + tuple val(meta), path("*_assembly_report.txt") , emit: report , optional: true + 
tuple val(meta), path("*_assembly_stats.txt") , emit: stats , optional: true + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def accessions_opt = accessions ? "-A ${accessions}" : "" + """ + ncbi-genome-download \\ + $args \\ + $accessions_opt \\ + --output-folder ./ \\ + --flat-output + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ncbigenomedownload: \$( ncbi-genome-download --version ) + END_VERSIONS + """ +} diff --git a/modules/ncbigenomedownload/meta.yml b/modules/ncbigenomedownload/meta.yml new file mode 100644 index 00000000..fd9e0a45 --- /dev/null +++ b/modules/ncbigenomedownload/meta.yml @@ -0,0 +1,91 @@ +name: ncbigenomedownload +description: A tool to quickly download assemblies from NCBI's Assembly database +keywords: + - fasta + - download + - assembly +tools: + - ncbigenomedownload: + description: Download genome files from the NCBI FTP server. + homepage: https://github.com/kblin/ncbi-genome-download + documentation: https://github.com/kblin/ncbi-genome-download + tool_dev_url: https://github.com/kblin/ncbi-genome-download + doi: "" + licence: ['Apache Software License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - accessions: + type: file + description: List of accessions (one per line) to download + pattern: "*.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - gbk: + type: file + description: GenBank format of the genomic sequence(s) in the assembly + pattern: "*_genomic.gbff.gz" + - fna: + type: file + description: FASTA format of the genomic sequence(s) in the assembly. + pattern: "*_genomic.fna.gz" + - rm: + type: file + description: RepeatMasker output for eukaryotes. 
+ pattern: "*_rm.out.gz" + - features: + type: file + description: Tab-delimited text file reporting locations and attributes for a subset of annotated features + pattern: "*_feature_table.txt.gz" + - gff: + type: file + description: Annotation of the genomic sequence(s) in GFF3 format + pattern: "*_genomic.gff.gz" + - faa: + type: file + description: FASTA format of the accessioned protein products annotated on the genome assembly. + pattern: "*_protein.faa.gz" + - gpff: + type: file + description: GenPept format of the accessioned protein products annotated on the genome assembly. + pattern: "*_protein.gpff.gz" + - wgs_gbk: + type: file + description: GenBank flat file format of the WGS master for the assembly + pattern: "*_wgsmaster.gbff.gz" + - cds: + type: file + description: FASTA format of the nucleotide sequences corresponding to all CDS features annotated on the assembly + pattern: "*_cds_from_genomic.fna.gz" + - rna: + type: file + description: FASTA format of accessioned RNA products annotated on the genome assembly + pattern: "*_rna.fna.gz" + - rna_fna: + type: file + description: FASTA format of the nucleotide sequences corresponding to all RNA features annotated on the assembly + pattern: "*_rna_from_genomic.fna.gz" + - report: + type: file + description: Tab-delimited text file reporting the name, role and sequence accession.version for objects in the assembly + pattern: "*_assembly_report.txt" + - stats: + type: file + description: Tab-delimited text file reporting statistics for the assembly + pattern: "*_assembly_stats.txt" + +authors: + - "@rpetit3" diff --git a/modules/nextclade/functions.nf b/modules/nextclade/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/nextclade/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return 
task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/nextclade/main.nf b/modules/nextclade/main.nf index 6fc6efc4..f60af57b 100755 --- a/modules/nextclade/main.nf +++ b/modules/nextclade/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process NEXTCLADE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::nextclade_js=0.14.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/nextclade_js:0.14.4--h9ee0642_0' : + 'quay.io/biocontainers/nextclade_js:0.14.4--h9ee0642_0' }" input: tuple val(meta), path(fasta) @@ -30,10 +19,11 @@ process NEXTCLADE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ nextclade \\ - $options.args \\ + $args \\ --jobs $task.cpus \\ --input-fasta $fasta \\ --output-json ${prefix}.json \\ @@ -43,8 +33,8 @@ process NEXTCLADE { --output-tree ${prefix}.tree.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(nextclade --version 2>&1) + "${task.process}": + nextclade: \$(nextclade --version 2>&1) END_VERSIONS """ } diff --git a/modules/ngmaster/main.nf b/modules/ngmaster/main.nf new file mode 100644 index 00000000..7d04031c --- /dev/null +++ b/modules/ngmaster/main.nf @@ -0,0 +1,31 @@ +process NGMASTER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::ngmaster=0.5.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ngmaster:0.5.8--pyhdfd78af_1' : + 'quay.io/biocontainers/ngmaster:0.5.8--pyhdfd78af_1' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + ngmaster \\ + $args \\ + $fasta \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ngmaster: \$( echo \$(ngmaster --version 2>&1) | sed 's/^.*ngmaster //' ) + END_VERSIONS + """ +} diff --git a/modules/ngmaster/meta.yml b/modules/ngmaster/meta.yml new file mode 100644 index 00000000..1dbb02a0 --- /dev/null +++ b/modules/ngmaster/meta.yml @@ -0,0 +1,43 @@ +name: ngmaster +description: Serotyping Neisseria gonorrhoeae assemblies +keywords: + - fasta + - Neisseria gonorrhoeae + - serotype +tools: + - ngmaster: + description: In silico multi-antigen sequence typing for Neisseria gonorrhoeae (NG-MAST) + homepage: 
https://github.com/MDU-PHL/ngmaster/blob/master/README.md + documentation: https://github.com/MDU-PHL/ngmaster/blob/master/README.md + tool_dev_url: https://github.com/MDU-PHL/ngmaster + doi: "10.1099/mgen.0.000076" + licence: ['GPL v3 only'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA assembly file + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited result file + pattern: "*.tsv" + +authors: + - "@rpetit3" diff --git a/modules/nucmer/main.nf b/modules/nucmer/main.nf new file mode 100644 index 00000000..4e296515 --- /dev/null +++ b/modules/nucmer/main.nf @@ -0,0 +1,45 @@ +process NUCMER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::mummer=3.23" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mummer:3.23--pl5262h1b792b2_12' : + 'quay.io/biocontainers/mummer:3.23--pl5262h1b792b2_12' }" + + input: + tuple val(meta), path(ref), path(query) + + output: + tuple val(meta), path("*.delta") , emit: delta + tuple val(meta), path("*.coords"), emit: coords + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def is_compressed_ref = ref.getName().endsWith(".gz") ? true : false + def is_compressed_query = query.getName().endsWith(".gz") ? 
true : false + def fasta_name_ref = ref.getName().replace(".gz", "") + def fasta_name_query = query.getName().replace(".gz", "") + """ + if [ "$is_compressed_ref" == "true" ]; then + gzip -c -d $ref > $fasta_name_ref + fi + if [ "$is_compressed_query" == "true" ]; then + gzip -c -d $query > $fasta_name_query + fi + + nucmer \\ + -p $prefix \\ + --coords \\ + $args \\ + $fasta_name_ref \\ + $fasta_name_query + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + nucmer: \$( nucmer --version 2>&1 | grep "version" | sed -e "s/NUCmer (NUCleotide MUMmer) version //g; s/nucmer//g;" ) + END_VERSIONS + """ +} diff --git a/modules/nucmer/meta.yml b/modules/nucmer/meta.yml new file mode 100644 index 00000000..cccf723f --- /dev/null +++ b/modules/nucmer/meta.yml @@ -0,0 +1,50 @@ +name: nucmer +description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. +keywords: + - align + - nucleotide +tools: + - nucmer: + description: NUCmer is a pipeline for the alignment of multiple closely related nucleotide sequences. + homepage: http://mummer.sourceforge.net/ + documentation: http://mummer.sourceforge.net/ + tool_dev_url: http://mummer.sourceforge.net/ + doi: "https://doi.org/10.1186/gb-2004-5-2-r12" + licence: ['The Artistic License'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ref: + type: file + description: FASTA file of the reference sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + - query: + type: file + description: FASTA file of the query sequence + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - delta: + type: file + description: File containing coordinates of matches between reference and query + - coords: + type: file + description: NUCmer1.1 coords output file + pattern: "*.{coords}" + +authors: + - "@sateeshperi" + - "@mjcipriano" diff --git a/modules/optitype/functions.nf b/modules/optitype/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/optitype/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // 
Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 083b03a7..d27f7f9f 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process OPTITYPE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::optitype=1.3.5" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/optitype:1.3.5--0" - } else { - container "quay.io/biocontainers/optitype:1.3.5--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/optitype:1.3.5--0' : + 'quay.io/biocontainers/optitype:1.3.5--0' }" input: tuple val(meta), path(bam) @@ -26,30 +15,32 @@ process OPTITYPE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ - # Create a config for OptiType on a per sample basis with options.args2 + # Create a config for OptiType on a per sample basis with task.ext.args2 #Doing it old school now echo "[mapping]" > config.ini echo "razers3=razers3" >> config.ini echo "threads=$task.cpus" >> config.ini echo "[ilp]" >> config.ini - echo "$options.args2" >> config.ini + echo "$args2" >> config.ini echo "threads=1" >> config.ini echo "[behavior]" >> config.ini echo "deletebam=true" >> config.ini echo "unpaired_weight=0" >> config.ini echo "use_discordant=false" >> config.ini - # Run the actual OptiType typing with options.args - OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $options.args --prefix $prefix --outdir $prefix + # Run the actual OptiType typing with args + OptiTypePipeline.py -i ${bam} -c config.ini --${meta.seq_type} $args --prefix $prefix --outdir $prefix #Couldn't find a nicer way of doing this cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" | sed -e "s/Version: //g") + "${task.process}": + optitype: \$(cat \$(which OptiTypePipeline.py) | grep -e "Version:" 
| sed -e "s/Version: //g") END_VERSIONS """ } diff --git a/modules/pairix/functions.nf b/modules/pairix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairix/main.nf b/modules/pairix/main.nf index 4bfd3b0d..c1b9658c 100644 --- a/modules/pairix/main.nf +++ b/modules/pairix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairix=0.3.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3" - } else { - container "quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/pairix:0.3.7--py36h30a8e3e_3' : + 'quay.io/biocontainers/pairix:0.3.7--py36h30a8e3e_3' }" input: tuple val(meta), path(pair) @@ -26,14 +15,15 @@ process PAIRIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pairix \\ - $options.args \\ + $args \\ $pair cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') + "${task.process}": + pairix: \$(echo \$(pairix --help 2>&1) | sed 's/^.*Version: //; s/Usage.*\$//') END_VERSIONS """ } diff --git a/modules/pairtools/dedup/functions.nf b/modules/pairtools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/dedup/main.nf b/modules/pairtools/dedup/main.nf index eabf24dd..fe59e155 100644 --- a/modules/pairtools/dedup/main.nf +++ b/modules/pairtools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_DEDUP { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools dedup \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ --output-stats ${prefix}.pairs.stat \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/flip/functions.nf b/modules/pairtools/flip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/flip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/flip/main.nf b/modules/pairtools/flip/main.nf index 50cfdfd2..376191ce 100644 --- a/modules/pairtools/flip/main.nf +++ b/modules/pairtools/flip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_FLIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(sam) @@ -27,18 +16,19 @@ process PAIRTOOLS_FLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ flip \\ -c $chromsizes \\ - $options.args \\ + $args \\ -o ${prefix}.flip.gz \\ $sam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/parse/functions.nf b/modules/pairtools/parse/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/parse/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/parse/main.nf b/modules/pairtools/parse/main.nf index cd6099e1..7bd778c9 100644 --- a/modules/pairtools/parse/main.nf +++ b/modules/pairtools/parse/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_PARSE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(bam) @@ -28,19 +17,20 @@ process PAIRTOOLS_PARSE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ parse \\ -c $chromsizes \\ - $options.args \\ + $args \\ --output-stats ${prefix}.pairsam.stat \\ -o ${prefix}.pairsam.gz \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/restrict/functions.nf b/modules/pairtools/restrict/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/restrict/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/restrict/main.nf b/modules/pairtools/restrict/main.nf index b1b21da7..8759f709 100644 --- a/modules/pairtools/restrict/main.nf +++ b/modules/pairtools/restrict/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_RESTRICT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(pairs) @@ -27,18 +16,19 @@ process PAIRTOOLS_RESTRICT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools \\ restrict \\ -f $frag \\ - $options.args \\ + $args \\ -o ${prefix}.pairs.gz \\ $pairs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/select/functions.nf b/modules/pairtools/select/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/select/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/select/main.nf b/modules/pairtools/select/main.nf index dec29573..a6d62ba7 100644 --- a/modules/pairtools/select/main.nf +++ b/modules/pairtools/select/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SELECT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -27,17 +16,18 @@ process PAIRTOOLS_SELECT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pairtools select \\ - "$options.args" \\ + "$args" \\ -o ${prefix}.selected.pairs.gz \\ --output-rest ${prefix}.unselected.pairs.gz \\ ${input} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pairtools/sort/functions.nf b/modules/pairtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pairtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core 
modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pairtools/sort/main.nf b/modules/pairtools/sort/main.nf index 996bcb0b..d5996dd0 100644 --- a/modules/pairtools/sort/main.nf +++ b/modules/pairtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PAIRTOOLS_SORT { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pairtools=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5" - } else { - container "quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pairtools:0.3.0--py37hb9c2fc3_5' : + 'quay.io/biocontainers/pairtools:0.3.0--py37hb9c2fc3_5' }" input: tuple val(meta), path(input) @@ -26,20 +15,21 @@ process PAIRTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def mem = task.memory.toString().replaceAll(/(\s|\.|B)+/, '') """ pairtools \\ sort \\ - $options.args \\ + $args \\ --nproc $task.cpus \\ --memory "$mem" \\ -o ${prefix}.pairs.gz \\ $input cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') + "${task.process}": + pairtools: \$(pairtools --version 2>&1 | sed 's/pairtools.*version //') END_VERSIONS """ } diff --git a/modules/pangolin/functions.nf b/modules/pangolin/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pangolin/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pangolin/main.nf b/modules/pangolin/main.nf index edf67dd7..5ee2b2e0 100644 --- a/modules/pangolin/main.nf +++ b/modules/pangolin/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PANGOLIN { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::pangolin=3.1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' - } else { - container 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pangolin:3.1.11--pyhdfd78af_1' : + 'quay.io/biocontainers/pangolin:3.1.11--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process PANGOLIN { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pangolin \\ $fasta\\ --outfile ${prefix}.pangolin.csv \\ --threads $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pangolin --version | sed "s/pangolin //g") + "${task.process}": + pangolin: \$(pangolin --version | sed "s/pangolin //g") END_VERSIONS """ } diff --git a/modules/paraclu/functions.nf b/modules/paraclu/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/paraclu/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/paraclu/main.nf b/modules/paraclu/main.nf index 6d65a784..1623ea89 100644 --- a/modules/paraclu/main.nf +++ b/modules/paraclu/main.nf @@ -1,22 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) +def VERSION = '10' // Version information not provided by tool on CLI process PARACLU { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::paraclu=10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1" - } else { - container "quay.io/biocontainers/paraclu:10--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1' : + 'quay.io/biocontainers/paraclu:10--h9a82719_1' }" input: tuple val(meta), path(bed) @@ -27,8 +18,8 @@ process PARACLU { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - def VERSION=10 + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P @@ -38,8 +29,8 @@ process PARACLU { awk -F '\t' '{print \$1"\t"\$3"\t"\$4"\t"\$1":"\$3".."\$4","\$2"\t"\$6"\t"\$2}' ${prefix}.clustered.simplified > ${prefix}.clustered.simplified.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: $VERSION + "${task.process}": + paraclu: $VERSION END_VERSIONS """ } diff --git a/modules/pbbam/pbmerge/functions.nf b/modules/pbbam/pbmerge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbbam/pbmerge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbbam/pbmerge/main.nf b/modules/pbbam/pbmerge/main.nf index 63cd2ffe..e0525cb1 100644 --- a/modules/pbbam/pbmerge/main.nf +++ b/modules/pbbam/pbmerge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBBAM_PBMERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pbbam=1.7.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1" - } else { - container "quay.io/biocontainers/pbbam:1.7.0--h058f120_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbbam:1.7.0--h058f120_1' : + 'quay.io/biocontainers/pbbam:1.7.0--h058f120_1' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process PBBAM_PBMERGE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pbmerge \\ -o ${prefix}.bam \\ - $options.args \\ + $args \\ *.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - pbbam/pbmerge: \$( pbmerge --version|sed 's/pbmerge //' ) + "${task.process}": + pbbam: \$( pbmerge --version|sed 's/pbmerge //' ) END_VERSIONS """ } diff --git a/modules/pbccs/functions.nf b/modules/pbccs/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pbccs/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pbccs/main.nf b/modules/pbccs/main.nf index 7e70ac14..440fbc72 100644 --- a/modules/pbccs/main.nf +++ b/modules/pbccs/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PBCCS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pbccs=6.2.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0" - } else { - container "quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pbccs:6.2.0--h9ee0642_0' : + 'quay.io/biocontainers/pbccs:6.2.0--h9ee0642_0' }" input: tuple val(meta), path(bam), path(pbi) @@ -32,21 +21,22 @@ process PBCCS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ ccs \\ $bam \\ ${prefix}.chunk${chunk_num}.bam \\ - --report-file ${prefix}.report.txt \\ - --report-json ${prefix}.report.json \\ - --metrics-json ${prefix}.metrics.json.gz \\ + --report-file ${prefix}.chunk${chunk_num}.report.txt \\ + --report-json ${prefix}.chunk${chunk_num}.report.json \\ + --metrics-json ${prefix}.chunk${chunk_num}.metrics.json.gz \\ --chunk $chunk_num/$chunk_on \\ -j $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') + "${task.process}": + pbccs: \$(echo \$(ccs --version 2>&1) | grep 'ccs' | sed 's/^.*ccs //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/pbccs/meta.yml b/modules/pbccs/meta.yml index 38f31496..f55c0d71 100644 --- a/modules/pbccs/meta.yml +++ b/modules/pbccs/meta.yml @@ -42,10 +42,26 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - css: + - bam: type: file - description: Consensus sequences - pattern: "*.ccs.bam" + description: CCS sequences in bam format + pattern: "*.bam" + - pbi: + type: file + description: PacBio Index of CCS sequences + pattern: "*.pbi" + - report_txt: + type: file + description: Summary of CCS in txt format + pattern: ".txt" + - report_json: + type: file + description: Summary of CCS in txt json + pattern: ".json" + - metrics: + type: file + description: Metrics about zmws + pattern: "*.json.gz" authors: - "@sguizard" diff --git a/modules/peddy/main.nf b/modules/peddy/main.nf new file mode 100644 index 00000000..d64c3762 --- /dev/null +++ b/modules/peddy/main.nf @@ -0,0 +1,37 @@ +process PEDDY { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::peddy=0.4.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/peddy:0.4.8--pyh5e36f6f_0' : + 'quay.io/biocontainers/peddy:0.4.8--pyh5e36f6f_0' }" + + input: + tuple val(meta), path(vcf), path(vcf_tbi) + path ped + + output: + tuple val(meta), path("*.html") , emit: html + tuple val(meta), path("*.csv") , emit: csv + tuple val(meta), path("*.peddy.ped"), emit: ped + tuple val(meta), path("*.png") , emit: png + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + peddy \\ + $args \\ + --plot \\ + -p $task.cpus \\ + $vcf \\ + $ped + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + peddy: \$( peddy --version 2>&1 | sed 's/peddy, version //' ) + END_VERSIONS + """ +} diff --git a/modules/peddy/meta.yml b/modules/peddy/meta.yml new file mode 100644 index 00000000..7c3fcf45 --- /dev/null +++ b/modules/peddy/meta.yml @@ -0,0 +1,64 @@ +name: peddy +description: Manipulation, validation and exploration of pedigrees +keywords: + - pedigrees + - ped + - family + +tools: + - peddy: + description: genotype, ped correspondence check, ancestry check, sex check. directly, quickly on VCF + homepage: https://github.com/brentp/peddy + documentation: https://peddy.readthedocs.io/en/latest/ + tool_dev_url: https://github.com/brentp/peddy + doi: "https://doi.org/10.1016/j.ajhg.2017.01.017" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF file + pattern: "*.{vcf.gz}" + - ped: + type: file + description: PED/FAM file + pattern: "*.{ped,fam}" + - vcf_tbi: + type: file + description: TBI file + pattern: "*.{vcf.gz.tbi}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - ped: + type: file + description: PED/FAM file + pattern: "*.peddy.{ped}" + - html: + type: file + description: HTML file + pattern: "*.{html}" + - csv: + type: file + description: CSV file + pattern: "*.{csv}" + - png: + type: file + description: PNG file + pattern: "*.{png}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@rannick" diff --git a/modules/phantompeakqualtools/functions.nf b/modules/phantompeakqualtools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/phantompeakqualtools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index b390bf7e..6fe34cc2 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.2.2' +def VERSION = '1.2.2' // Version information not provided by tool on CLI process PHANTOMPEAKQUALTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::phantompeakqualtools=1.2.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0" - } else { - container "quay.io/biocontainers/phantompeakqualtools:1.2.2--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/phantompeakqualtools:1.2.2--0' : + 'quay.io/biocontainers/phantompeakqualtools:1.2.2--0' }" input: tuple val(meta), path(bam) @@ -30,13 +19,15 @@ process PHANTOMPEAKQUALTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + phantompeakqualtools: $VERSION END_VERSIONS """ } diff --git a/modules/phyloflash/main.nf b/modules/phyloflash/main.nf new file mode 100644 index 00000000..9ebc40de --- /dev/null +++ b/modules/phyloflash/main.nf @@ -0,0 +1,72 @@ +process PHYLOFLASH { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::phyloflash=3.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/phyloflash:3.4--hdfd78af_1' : + 'quay.io/biocontainers/phyloflash:3.4--hdfd78af_1' }" + + input: + tuple val(meta), path(reads) + path silva_db + path univec_db + + output: + tuple val(meta), path("${meta.id}*/*"), emit: results + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if (meta.single_end) { + """ + phyloFlash.pl \\ + $args \\ + -read1 ${reads[0]} \\ + -lib $prefix \\ + -interleaved \\ + -dbhome . \\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } else { + """ + phyloFlash.pl \\ + $args \\ + -read1 ${reads[0]} \\ + -read2 ${reads[1]} \\ + -lib $prefix \\ + -dbhome . \\ + -CPUs $task.cpus + + mkdir $prefix + mv ${prefix}.* $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ + } + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + mkdir ${prefix} + touch ${prefix}/${prefix}.SSU.collection.fasta + touch ${prefix}/${prefix}.phyloFlash + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + phyloflash: \$(echo \$(phyloFlash.pl -version 2>&1) | sed "s/^.*phyloFlash v//") + END_VERSIONS + """ +} diff --git a/modules/phyloflash/meta.yml b/modules/phyloflash/meta.yml new file mode 100644 index 00000000..3ed7a9fa --- /dev/null +++ b/modules/phyloflash/meta.yml @@ -0,0 +1,51 @@ +name: phyloflash +description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset. 
+keywords:
+    - metagenomics
+    - illumina datasets
+    - phylogenetic composition
+tools:
+    - phyloflash:
+        description: phyloFlash is a pipeline to rapidly reconstruct the SSU rRNAs and explore phylogenetic composition of an illumina (meta)genomic dataset.
+
+        homepage: https://hrgv.github.io/phyloFlash/
+        documentation: https://hrgv.github.io/phyloFlash/usage.html
+        tool_dev_url: https://github.com/HRGV/phyloFlash
+        doi: "10.1128/mSystems.00920-20"
+        licence: ['GPL v3']
+
+input:
+    - meta:
+        type: map
+        description: |
+            Groovy Map containing sample information
+            e.g. [ id:'test', single_end:false ]
+    - reads:
+        type: file
+        description: Channel containing single or paired-end reads
+        pattern: "*.{fastq.gz,fq.gz}"
+    - silva_db:
+        type: folder
+        description: Folder containing the SILVA database
+        pattern: "ref"
+    - univec_db:
+        type: folder
+        description: Folder containing UniVec database
+        pattern: "UniVec"
+
+output:
+    - meta:
+        type: map
+        description: |
+            Groovy Map containing sample information
+            e.g. [ id:'test', single_end:false ]
+    - results:
+        type: folder
+        description: Folder containing the results of phyloFlash analysis
+        pattern: "${prefix}*"
+    - versions:
+        type: file
+        description: File containing software versions
+        pattern: "versions.yml"
+authors:
+    - "@abhi18av"
diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf
new file mode 100644
index 00000000..3705b8fb
--- /dev/null
+++ b/modules/picard/collecthsmetrics/main.nf
@@ -0,0 +1,48 @@
+process PICARD_COLLECTHSMETRICS {
+    tag "$meta.id"
+    label 'process_medium'
+
+    conda (params.enable_conda ? "bioconda::picard=2.26.2" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/picard:2.26.2--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.2--hdfd78af_0' }" + + input: + tuple val(meta), path(bam) + path fasta + path fai + path bait_intervals + path target_intervals + + output: + tuple val(meta), path("*collecthsmetrics.txt"), emit: hs_metrics + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def reference = fasta ? "-R $fasta" : "" + + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard CollectHsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + picard \\ + -Xmx${avail_mem}g \\ + CollectHsMetrics \\ + $args \\ + $reference \\ + -BAIT_INTERVALS $bait_intervals \\ + -TARGET_INTERVALS $target_intervals \\ + -INPUT $bam \\ + -OUTPUT ${prefix}_collecthsmetrics.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ +} diff --git a/modules/picard/collecthsmetrics/meta.yml b/modules/picard/collecthsmetrics/meta.yml new file mode 100644 index 00000000..4b94909f --- /dev/null +++ b/modules/picard/collecthsmetrics/meta.yml @@ -0,0 +1,66 @@ +name: picard_collecthsmetrics +description: Collects hybrid-selection (HS) metrics for a SAM or BAM file. +keywords: + - alignment + - metrics + - statistics + - insert + - hybrid-selection + - quality + - bam +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. 
+ homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + tool_dev_url: https://github.com/broadinstitute/picard/ + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: An aligned BAM/SAM file + pattern: "*.{bam,sam}" + - fasta: + type: file + description: | + A reference file to calculate dropout metrics measuring reduced representation of reads. + Optional input. + pattern: "*.fasta" + - fai: + type: file + description: Index of FASTA file. Only needed when fasta is supplied. + pattern: "*.fai" + - bait_intervals: + type: file + description: An interval list file that contains the locations of the baits used. + pattern: "baits.interval_list" + - target_intervals: + type: file + description: An interval list file that contains the locations of the targets. + pattern: "targets.interval_list" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - hs_metrics: + type: file + description: The metrics file. 
+ pattern: "*_collecthsmetrics.txt" + +authors: + - "@projectoriented" diff --git a/modules/picard/collectmultiplemetrics/functions.nf b/modules/picard/collectmultiplemetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectmultiplemetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta 
instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index dd8fdaca..6b292534 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTMULTIPLEMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_COLLECTMULTIPLEMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectMultipleMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -39,14 +29,14 @@ process PICARD_COLLECTMULTIPLEMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectMultipleMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectMultipleMetrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/collectwgsmetrics/functions.nf b/modules/picard/collectwgsmetrics/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/collectwgsmetrics/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index 6028feef..eddb4604 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -27,7 +16,8 @@ process PICARD_COLLECTWGSMETRICS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard CollectWgsMetrics] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -38,14 +28,14 @@ process PICARD_COLLECTWGSMETRICS { picard \\ -Xmx${avail_mem}g \\ CollectWgsMetrics \\ - $options.args \\ + $args \\ INPUT=$bam \\ OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ REFERENCE_SEQUENCE=$fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard CollectWgsMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/filtersamreads/functions.nf b/modules/picard/filtersamreads/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/filtersamreads/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and 
join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index 68cee34d..d8de137b 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_FILTERSAMREADS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam), path(readlist) @@ -27,7 +16,8 @@ process PICARD_FILTERSAMREADS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -42,11 +32,11 @@ process PICARD_FILTERSAMREADS { --INPUT $bam \\ --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else if ( filter == 'includeReadList' || filter == 'excludeReadList' ) { @@ -58,11 +48,11 @@ process PICARD_FILTERSAMREADS { --OUTPUT ${prefix}.bam \\ --FILTER $filter \\ --READ_LIST_FILE $readlist \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard FilterSamReads --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/markduplicates/functions.nf b/modules/picard/markduplicates/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/markduplicates/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 37b825d7..d3bf6938 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MARKDUPLICATES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process PICARD_MARKDUPLICATES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -39,14 +29,14 @@ process PICARD_MARKDUPLICATES { picard \\ -Xmx${avail_mem}g \\ MarkDuplicates \\ - $options.args \\ - -I $bam \\ - -O ${prefix}.bam \\ - -M ${prefix}.MarkDuplicates.metrics.txt + $args \\ + I=$bam \\ + O=${prefix}.bam \\ + M=${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/mergesamfiles/functions.nf b/modules/picard/mergesamfiles/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/mergesamfiles/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 355c0bf3..86796593 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_MERGESAMFILES { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bams) @@ -26,7 +15,8 @@ process PICARD_MERGESAMFILES { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def bam_files = bams.sort() def avail_mem = 3 if (!task.memory) { @@ -39,20 +29,20 @@ process PICARD_MERGESAMFILES { picard \\ -Xmx${avail_mem}g \\ MergeSamFiles \\ - $options.args \\ + $args \\ ${'INPUT='+bam_files.join(' INPUT=')} \\ OUTPUT=${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } else { """ ln -s ${bam_files[0]} ${prefix}.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/picard/sortsam/functions.nf b/modules/picard/sortsam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/picard/sortsam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index 939df1c0..eb3caf40 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -1,23 +1,11 @@ - -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PICARD_SORTSAM { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::picard=2.25.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/picard:2.25.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.25.7--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.25.7--hdfd78af_0' }" input: tuple val(meta), path(bam) @@ -28,7 +16,8 @@ process PICARD_SORTSAM { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -44,8 +33,8 @@ process PICARD_SORTSAM { --SORT_ORDER $sort_order cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + "${task.process}": + picard: \$(picard SortSam --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ } diff --git a/modules/pirate/functions.nf b/modules/pirate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pirate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 01a950dd..70de52e6 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PIRATE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pirate=1.0.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1" - } else { - container "quay.io/biocontainers/pirate:1.0.4--hdfd78af_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pirate%3A1.0.4--hdfd78af_1' : + 'quay.io/biocontainers/pirate:1.0.4--hdfd78af_1' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process PIRATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ PIRATE \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --input ./ \\ --output results/ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) + "${task.process}": + pirate: \$( echo \$( PIRATE --version 2>&1) | sed 's/PIRATE //' ) END_VERSIONS """ } diff --git a/modules/plasmidid/functions.nf b/modules/plasmidid/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plasmidid/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - 
options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plasmidid/main.nf b/modules/plasmidid/main.nf index 1edc5eeb..7404a678 100644 --- a/modules/plasmidid/main.nf +++ b/modules/plasmidid/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLASMIDID { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::plasmidid=1.6.5' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' - } else { - container 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plasmidid:1.6.5--hdfd78af_0' : + 'quay.io/biocontainers/plasmidid:1.6.5--hdfd78af_0' }" input: tuple val(meta), path(scaffold) @@ -34,19 +23,20 @@ process PLASMIDID { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ plasmidID \\ -d $fasta \\ -s $prefix \\ -c $scaffold \\ - $options.args \\ + $args \\ -o . 
mv NO_GROUP/$prefix ./$prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plasmidID --version 2>&1)) + "${task.process}": + plasmidid: \$(echo \$(plasmidID --version 2>&1)) END_VERSIONS """ } diff --git a/modules/plink/extract/main.nf b/modules/plink/extract/main.nf new file mode 100644 index 00000000..9b8a52f3 --- /dev/null +++ b/modules/plink/extract/main.nf @@ -0,0 +1,37 @@ +process PLINK_EXTRACT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" + + input: + tuple val(meta), path(bed), path(bim), path(fam), path(variants) + + output: + tuple val(meta), path("*.bed"), emit: bed + tuple val(meta), path("*.bim"), emit: bim + tuple val(meta), path("*.fam"), emit: fam + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if( "$bed" == "${prefix}.bed" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + plink \\ + --bfile ${meta.id} \\ + $args \\ + --extract $variants \\ + --threads $task.cpus \\ + --make-bed \\ + --out $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + plink: \$(echo \$(plink --version) | sed 's/^PLINK v//;s/64.*//') + END_VERSIONS + """ +} diff --git a/modules/plink/extract/meta.yml b/modules/plink/extract/meta.yml new file mode 100644 index 00000000..3978fbb4 --- /dev/null +++ b/modules/plink/extract/meta.yml @@ -0,0 +1,62 @@ +name: plink_extract +description: Subset plink bfiles with a text file of variant identifiers +keywords: + - extract + - plink +tools: + - plink: + description: Whole genome association analysis toolset, designed to 
perform a range of basic, large-scale analyses in a computationally efficient manner. + homepage: None + documentation: None + tool_dev_url: None + doi: "" + licence: ['GPL'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + - variants: + type: file + description: A text file containing variant identifiers to keep (one per line) + pattern: "*.{keep}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: PLINK binary biallelic genotype table + pattern: "*.{bed}" + - bim: + type: file + description: PLINK extended MAP file + pattern: "*.{bim}" + - fam: + type: file + description: PLINK sample information file + pattern: "*.{fam}" + +authors: + - "@nebfield" diff --git a/modules/plink/vcf/functions.nf b/modules/plink/vcf/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/plink/vcf/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/plink/vcf/main.nf b/modules/plink/vcf/main.nf index a676b723..719e90d2 100644 --- a/modules/plink/vcf/main.nf +++ b/modules/plink/vcf/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PLINK_VCF { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::plink=1.90b6.21" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1" - } else { - container "quay.io/biocontainers/plink:1.90b6.21--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink:1.90b6.21--h779adbc_1' : + 'quay.io/biocontainers/plink:1.90b6.21--h779adbc_1' }" input: tuple val(meta), path(vcf) @@ -29,18 +18,19 @@ process PLINK_VCF { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ plink \\ --vcf ${vcf} \\ - $options.args \\ + $args \\ --threads $task.cpus \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) + "${task.process}": + plink: \$(echo \$(plink --version 2>&1) | sed 's/^PLINK v//' | sed 's/..-bit.*//' ) END_VERSIONS """ } diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..078ece1e --- /dev/null +++ b/modules/plink2/vcf/main.nf @@ -0,0 +1,33 @@ +process PLINK2_VCF { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1' : + 'quay.io/biocontainers/plink2:2.00a2.3--h712d239_1' }" + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.pgen"), emit: pgen + tuple val(meta), path("*.psam"), emit: psam + tuple val(meta), path("*.pvar"), emit: pvar + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + plink2 \\ + $args \\ + --vcf $vcf \\ + --out ${prefix} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + plink2: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + END_VERSIONS + """ +} diff --git a/modules/plink2/vcf/meta.yml b/modules/plink2/vcf/meta.yml new file mode 100644 index 00000000..1b2f3a9b --- /dev/null +++ b/modules/plink2/vcf/meta.yml @@ -0,0 +1,52 @@ +name: plink2_vcf +description: Import variant genetic data using plink2 +keywords: + - plink2 + - import +tools: + - plink2: + description: | + Whole genome association analysis toolset, 
designed to perform a range + of basic, large-scale analyses in a computationally efficient manner + homepage: http://www.cog-genomics.org/plink/2.0/ + documentation: http://www.cog-genomics.org/plink/2.0/general_usage + tool_dev_url: None + doi: "" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Variant calling file (vcf) + pattern: "*.{vcf,vcf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - pgen: + type: file + description: PLINK 2 binary genotype table + pattern: "*.{pgen}" + - psam: + type: file + description: PLINK 2 sample information file + pattern: "*.{psam}" + - pvar: + type: file + description: PLINK 2 variant information file + pattern: "*.{pvar}" + +authors: + - "@nebfield" diff --git a/modules/pmdtools/filter/main.nf b/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..0b3bcbc6 --- /dev/null +++ b/modules/pmdtools/filter/main.nf @@ -0,0 +1,52 @@ +process PMDTOOLS_FILTER { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::pmdtools=0.60" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/pmdtools:0.60--hdfd78af_5' : + 'quay.io/biocontainers/pmdtools:0.60--hdfd78af_5' }" + + input: + tuple val(meta), path(bam), path (bai) + val(threshold) + path(reference) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def split_cpus = Math.floor(task.cpus/2) + def prefix = task.ext.prefix ?: "${meta.id}" + if ("$bam" == "${prefix}.bam") error "[pmdtools/filter] Input and output names are the same, use the suffix option to disambiguate!" + //threshold and header flags activate filtering function of pmdtools + """ + samtools \\ + calmd \\ + $bam \\ + $reference \\ + $args \\ + -@ ${split_cpus} \\ + | pmdtools \\ + --threshold $threshold \\ + --header \\ + $args2 \\ + | samtools \\ + view \\ + $args3 \\ + -Sb \\ + - \\ + -@ ${split_cpus} \\ + -o ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + pmdtools: \$( pmdtools --version | cut -f2 -d ' ' | sed 's/v//') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/pmdtools/filter/meta.yml b/modules/pmdtools/filter/meta.yml new file mode 100644 index 00000000..72abbfdc --- /dev/null +++ b/modules/pmdtools/filter/meta.yml @@ -0,0 +1,55 @@ +name: pmdtools_filter +description: pmdtools command to filter ancient DNA molecules from others +keywords: + - pmdtools + - aDNA + - filter + - damage +tools: + - pmdtools: + description: Compute postmortem damage patterns and decontaminate ancient genomes + homepage: https://github.com/pontussk/PMDtools + documentation: https://github.com/pontussk/PMDtools + tool_dev_url: https://github.com/pontussk/PMDtools + doi: "10.1073/pnas.1318934111" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + - bai: + type: file + description: BAM index file + pattern: "*.bai" + - threshold: + type: value + description: Post-mortem damage score threshold + - reference: + type: file + description: FASTA file + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Filtered BAM file + pattern: "*.bam" + +authors: + - "@alexandregilardet" diff --git a/modules/porechop/functions.nf b/modules/porechop/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/porechop/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/porechop/main.nf b/modules/porechop/main.nf index cf564938..249efad9 100644 --- a/modules/porechop/main.nf +++ b/modules/porechop/main.nf @@ -1,42 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PORECHOP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::porechop=0.2.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2" - } else { - container "quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2' : + 'quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*.fastq.gz") , emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("*.fastq.gz"), emit: reads + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ porechop \\ - -i ${reads} \\ - -t ${task.cpus} \\ - ${options.args} \\ + -i $reads \\ + -t $task.cpus \\ + $args \\ -o ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( porechop --version ) + "${task.process}": + porechop: \$( porechop --version ) END_VERSIONS """ } diff --git a/modules/preseq/lcextrap/functions.nf b/modules/preseq/lcextrap/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/preseq/lcextrap/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index f551a549..b5bd0620 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRESEQ_LCEXTRAP { tag "$meta.id" label 'process_medium' label 'error_ignore' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::preseq=3.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1" - } else { - container "quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1' : + 'quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1' }" input: tuple val(meta), path(bam) @@ -28,20 +17,21 @@ process PRESEQ_LCEXTRAP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' """ preseq \\ lc_extrap \\ - $options.args \\ + $args \\ $paired_end \\ -output ${prefix}.ccurve.txt \\ $bam cp .command.err ${prefix}.command.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') + "${task.process}": + preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') END_VERSIONS """ } diff --git a/modules/prodigal/functions.nf b/modules/prodigal/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prodigal/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prodigal/main.nf b/modules/prodigal/main.nf index 572ffe92..184b17bb 100644 --- a/modules/prodigal/main.nf +++ b/modules/prodigal/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PRODIGAL { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::prodigal=2.6.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2" - } else { - container "quay.io/biocontainers/prodigal:2.6.3--h516909a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prodigal:2.6.3--h516909a_2' : + 'quay.io/biocontainers/prodigal:2.6.3--h516909a_2' }" input: tuple val(meta), path(genome) @@ -30,10 +19,11 @@ process PRODIGAL { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" """ prodigal -i "${genome}" \\ - $options.args \\ + $args \\ -f $output_format \\ -d "${prefix}.fna" \\ -o "${prefix}.${output_format}" \\ @@ -41,8 +31,8 @@ process PRODIGAL { -s "${prefix}_all.txt" cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') + "${task.process}": + prodigal: \$(prodigal -v 2>&1 | sed -n 's/Prodigal V\\(.*\\):.*/\\1/p') END_VERSIONS """ } diff --git a/modules/prokka/functions.nf b/modules/prokka/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/prokka/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules 
-// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/prokka/main.nf b/modules/prokka/main.nf index fb86078c..551a17b9 100644 --- a/modules/prokka/main.nf +++ b/modules/prokka/main.nf @@ -1,21 +1,11 @@ -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PROKKA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::prokka=1.14.6" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0" - } else { - container "quay.io/biocontainers/prokka:1.14.6--pl526_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/prokka:1.14.6--pl526_0' : + 'quay.io/biocontainers/prokka:1.14.6--pl526_0' }" input: tuple val(meta), path(fasta) @@ -38,12 +28,13 @@ process PROKKA { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def proteins_opt = proteins ? "--proteins ${proteins[0]}" : "" def prodigal_opt = prodigal_tf ? 
"--prodigaltf ${prodigal_tf[0]}" : "" """ prokka \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --prefix $prefix \\ $proteins_opt \\ @@ -51,8 +42,8 @@ process PROKKA { $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') + "${task.process}": + prokka: \$(echo \$(prokka --version 2>&1) | sed 's/^.*prokka //') END_VERSIONS """ } diff --git a/modules/pycoqc/functions.nf b/modules/pycoqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pycoqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pycoqc/main.nf b/modules/pycoqc/main.nf index 2c263d61..e966b31c 100644 --- a/modules/pycoqc/main.nf +++ b/modules/pycoqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYCOQC { tag "$summary" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::pycoqc=2.5.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0" - } else { - container "quay.io/biocontainers/pycoqc:2.5.2--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pycoqc:2.5.2--py_0' : + 'quay.io/biocontainers/pycoqc:2.5.2--py_0' }" input: path summary @@ -27,16 +16,17 @@ process PYCOQC { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ pycoQC \\ - $options.args \\ + $args \\ -f $summary \\ -o pycoqc.html \\ -j pycoqc.json cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') + "${task.process}": + pycoqc: \$(pycoQC --version 2>&1 | sed 's/^.*pycoQC v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/pydamage/analyze/functions.nf b/modules/pydamage/analyze/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/analyze/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 9cfb8a1a..c06c592c 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_ANALYZE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,17 +15,18 @@ process PYDAMAGE_ANALYZE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ analyze \\ - $options.args \\ + $args \\ -p $task.cpus \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/pydamage/filter/functions.nf b/modules/pydamage/filter/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/pydamage/filter/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results 
-// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index 6cd7ae7a..ab0b2115 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process PYDAMAGE_FILTER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::pydamage=0.62" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0" - } else { - container "quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" input: tuple val(meta), path(csv) @@ -26,17 +15,18 @@ process PYDAMAGE_FILTER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ pydamage \\ filter \\ - $options.args \\ + $args \\ $csv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') + "${task.process}": + pydamage: \$(echo \$(pydamage --version 2>&1) | sed -e 's/pydamage, version //g') END_VERSIONS """ } diff --git a/modules/qcat/functions.nf b/modules/qcat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qcat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = 
args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qcat/main.nf b/modules/qcat/main.nf index b650fb8c..7d81952d 100644 --- a/modules/qcat/main.nf +++ b/modules/qcat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QCAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qcat=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0" - } else { - container "quay.io/biocontainers/qcat:1.1.0--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qcat:1.1.0--py_0' : + 'quay.io/biocontainers/qcat:1.1.0--py_0' }" input: tuple val(meta), path(reads) @@ -27,7 +16,8 @@ process QCAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ ## Unzip fastq file ## qcat doesn't support zipped files yet @@ -47,8 +37,8 @@ process QCAT { gzip fastq/* cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') + "${task.process}": + qcat: \$(qcat --version 2>&1 | sed 's/^.*qcat //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/bamqc/functions.nf b/modules/qualimap/bamqc/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/bamqc/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// 
-def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index d33f1e67..973fd6a4 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_BAMQC { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -28,7 +17,8 @@ process QUALIMAP_BAMQC { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def collect_pairs = meta.single_end ? '' : '--collect-overlap-pairs' def memory = task.memory.toGiga() + "G" @@ -47,7 +37,7 @@ process QUALIMAP_BAMQC { qualimap \\ --java-mem-size=$memory \\ bamqc \\ - $options.args \\ + $args \\ -bam $bam \\ $regions \\ -p $strandedness \\ @@ -56,8 +46,8 @@ process QUALIMAP_BAMQC { -nt $task.cpus cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/qualimap/rnaseq/functions.nf b/modules/qualimap/rnaseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/qualimap/rnaseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/qualimap/rnaseq/main.nf b/modules/qualimap/rnaseq/main.nf index 9492cec6..d83fcd99 100644 --- a/modules/qualimap/rnaseq/main.nf +++ b/modules/qualimap/rnaseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUALIMAP_RNASEQ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::qualimap=2.2.2d" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1" - } else { - container "quay.io/biocontainers/qualimap:2.2.2d--1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/qualimap:2.2.2d--1' : + 'quay.io/biocontainers/qualimap:2.2.2d--1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process QUALIMAP_RNASEQ { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? 
'' : '-pe' def memory = task.memory.toGiga() + "G" @@ -44,7 +34,7 @@ process QUALIMAP_RNASEQ { qualimap \\ --java-mem-size=$memory \\ rnaseq \\ - $options.args \\ + $args \\ -bam $bam \\ -gtf $gtf \\ -p $strandedness \\ @@ -52,8 +42,8 @@ process QUALIMAP_RNASEQ { -outdir $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ } diff --git a/modules/quast/functions.nf b/modules/quast/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/quast/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/quast/main.nf b/modules/quast/main.nf index 072d649d..e88051b5 100644 --- a/modules/quast/main.nf +++ b/modules/quast/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process QUAST { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::quast=5.0.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' - } else { - container 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/quast:5.0.2--py37pl526hb5aa323_2' : + 'quay.io/biocontainers/quast:5.0.2--py37pl526hb5aa323_2' }" input: path consensus @@ -30,7 +19,8 @@ process QUAST { path "versions.yml" , emit: versions script: - prefix = options.suffix ?: software + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: 'quast' def features = use_gff ? "--features $gff" : '' def reference = use_fasta ? "-r $fasta" : '' """ @@ -39,12 +29,14 @@ process QUAST { $reference \\ $features \\ --threads $task.cpus \\ - $options.args \\ + $args \\ ${consensus.join(' ')} + ln -s ${prefix}/report.tsv + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') + "${task.process}": + quast: \$(quast.py --version 2>&1 | sed 's/^.*QUAST v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/racon/functions.nf b/modules/racon/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/racon/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/racon/main.nf b/modules/racon/main.nf index 60a5061e..9be5ce63 100644 --- a/modules/racon/main.nf +++ b/modules/racon/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RACON { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::racon=1.4.20" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1" - } else { - container "quay.io/biocontainers/racon:1.4.20--h9a82719_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/racon:1.4.20--h9a82719_1' : + 'quay.io/biocontainers/racon:1.4.20--h9a82719_1' }" input: tuple val(meta), path(reads), path(assembly), path(paf) @@ -26,20 +15,21 @@ process RACON { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - racon -t "${task.cpus}" \\ + racon -t "$task.cpus" \\ "${reads}" \\ "${paf}" \\ - $options.args \\ + $args \\ "${assembly}" > \\ ${prefix}_assembly_consensus.fasta gzip -n ${prefix}_assembly_consensus.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( racon --version 2>&1 | sed 's/^.*v//' ) + "${task.process}": + racon: \$( racon --version 2>&1 | sed 's/^.*v//' ) END_VERSIONS """ } diff --git a/modules/rapidnj/functions.nf b/modules/rapidnj/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rapidnj/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} 
- -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rapidnj/main.nf b/modules/rapidnj/main.nf index aa23b56e..04a08227 100644 --- a/modules/rapidnj/main.nf +++ b/modules/rapidnj/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '2.3.2' // No version information printed +def VERSION = '2.3.2' // Version information not provided by tool on CLI process RAPIDNJ { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::rapidnj=2.3.2 conda-forge::biopython=1.78" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } else { - container "quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' : + 'quay.io/biocontainers/mulled-v2-805c6e0f138f952f9c61cdd57c632a1a263ea990:3c52e4c8da6b3e4d69b9ca83fa4d366168898179-0' }" input: path alignment @@ -28,20 +17,21 @@ process RAPIDNJ { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ python \\ -c 'from Bio import SeqIO; SeqIO.convert("$alignment", "fasta", "alignment.sth", "stockholm")' rapidnj \\ alignment.sth \\ - $options.args \\ + $args \\ -i sth \\ -c $task.cpus \\ -x rapidnj_phylogeny.tre cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + rapidnj: $VERSION biopython: \$(python -c "import Bio; print(Bio.__version__)") END_VERSIONS """ diff --git a/modules/rasusa/functions.nf b/modules/rasusa/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rasusa/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return 
task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rasusa/main.nf b/modules/rasusa/main.nf index b9ba0b13..c2893d18 100644 --- a/modules/rasusa/main.nf +++ b/modules/rasusa/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RASUSA { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rasusa=0.3.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1" - } else { - container "quay.io/biocontainers/rasusa:0.3.0--h779adbc_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rasusa:0.3.0--h779adbc_1' : + 'quay.io/biocontainers/rasusa:0.3.0--h779adbc_1' }" input: tuple val(meta), path(reads), val(genome_size) @@ -27,18 +16,19 @@ process RASUSA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def output = meta.single_end ? 
"--output ${prefix}.fastq.gz" : "--output ${prefix}_1.fastq.gz ${prefix}_2.fastq.gz" """ rasusa \\ - $options.args \\ + $args \\ --coverage $depth_cutoff \\ --genome-size $genome_size \\ --input $reads \\ $output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") + "${task.process}": + rasusa: \$(rasusa --version 2>&1 | sed -e "s/rasusa //g") END_VERSIONS """ } diff --git a/modules/raxmlng/functions.nf b/modules/raxmlng/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/raxmlng/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = 
initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/raxmlng/main.nf b/modules/raxmlng/main.nf index f607b506..62b6c78a 100644 --- a/modules/raxmlng/main.nf +++ b/modules/raxmlng/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RAXMLNG { label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
'bioconda::raxml-ng=1.0.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0" - } else { - container "quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/raxml-ng:1.0.3--h32fcf60_0' : + 'quay.io/biocontainers/raxml-ng:1.0.3--h32fcf60_0' }" input: path alignment @@ -26,16 +15,17 @@ process RAXMLNG { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ raxml-ng \\ - $options.args \\ + $args \\ --msa $alignment \\ --threads $task.cpus \\ --prefix output cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. //; s/released.*\$//') + "${task.process}": + raxmlng: \$(echo \$(raxml-ng --version 2>&1) | sed 's/^.*RAxML-NG v. 
//; s/released.*\$//') END_VERSIONS """ } diff --git a/modules/rmarkdownnotebook/functions.nf b/modules/rmarkdownnotebook/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rmarkdownnotebook/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rmarkdownnotebook/main.nf b/modules/rmarkdownnotebook/main.nf index 4bded58c..f8183216 100644 --- a/modules/rmarkdownnotebook/main.nf +++ b/modules/rmarkdownnotebook/main.nf @@ -1,29 +1,16 @@ -// Import generic module functions -include { initOptions; saveFiles; getProcessName; getSoftwareName } from './functions' -include { dump_params_yml; indent_code_block } from "./parametrize" - -params.options = [:] -options = initOptions(params.options) -params.parametrize = true -params.implicit_params = true -params.meta_params = true +include { dump_params_yml; indent_code_block } from "./parametrize" process RMARKDOWNNOTEBOOK { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } //NB: You likely want to override this with a container containing all required //dependencies for your analysis. The container at least needs to contain the //yaml and rmarkdown R packages. conda (params.enable_conda ? 
"r-base=4.1.0 r-rmarkdown=2.9 r-yaml=2.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } else { - container "quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5%3A0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' : + 'quay.io/biocontainers/mulled-v2-31ad840d814d356e5f98030a4ee308a16db64ec5:0e852a1e4063fdcbe3f254ac2c7469747a60e361-0' }" input: tuple val(meta), path(notebook) @@ -37,7 +24,11 @@ process RMARKDOWNNOTEBOOK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def parametrize = (task.ext.parametrize == null) ? true : task.ext.parametrize + def implicit_params = (task.ext.implicit_params == null) ? true : task.ext.implicit_params + def meta_params = (task.ext.meta_params == null) ? true : task.ext.meta_params // Dump parameters to yaml file. 
// Using a yaml file over using the CLI params because @@ -45,14 +36,14 @@ process RMARKDOWNNOTEBOOK { // * allows to pass nested maps instead of just single values def params_cmd = "" def render_cmd = "" - if (params.parametrize) { + if (parametrize) { nb_params = [:] - if (params.implicit_params) { + if (implicit_params) { nb_params["cpus"] = task.cpus nb_params["artifact_dir"] = "artifacts" nb_params["input_dir"] = "./" } - if (params.meta_params) { + if (meta_params) { nb_params["meta"] = meta } nb_params += parameters @@ -73,9 +64,9 @@ process RMARKDOWNNOTEBOOK { mkdir artifacts # Set parallelism for BLAS/MKL etc. to avoid over-booking of resources - export MKL_NUM_THREADS="${task.cpus}" - export OPENBLAS_NUM_THREADS="${task.cpus}" - export OMP_NUM_THREADS="${task.cpus}" + export MKL_NUM_THREADS="$task.cpus" + export OPENBLAS_NUM_THREADS="$task.cpus" + export OMP_NUM_THREADS="$task.cpus" # Work around https://github.com/rstudio/rmarkdown/issues/1508 # If the symbolic link is not replaced by a physical file @@ -90,7 +81,7 @@ process RMARKDOWNNOTEBOOK { EOF cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": rmarkdown: \$(Rscript -e "cat(paste(packageVersion('rmarkdown'), collapse='.'))") END_VERSIONS """ diff --git a/modules/roary/functions.nf b/modules/roary/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/roary/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/roary/main.nf b/modules/roary/main.nf index 9dc948fb..edda3281 100644 --- a/modules/roary/main.nf +++ b/modules/roary/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process ROARY { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::roary=3.13.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0" - } else { - container "quay.io/biocontainers/roary:3.13.0--pl526h516909a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/roary:3.13.0--pl526h516909a_0' : + 'quay.io/biocontainers/roary:3.13.0--pl526h516909a_0' }" input: tuple val(meta), path(gff) @@ -27,17 +16,18 @@ process ROARY { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ roary \\ - $options.args \\ + $args \\ -p $task.cpus \\ -f results/ \\ $gff cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( roary --version ) + "${task.process}": + roary: \$( roary --version ) END_VERSIONS """ } diff --git a/modules/rsem/calculateexpression/functions.nf b/modules/rsem/calculateexpression/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/calculateexpression/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index f19392f7..4b2ada47 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_CALCULATEEXPRESSION { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: tuple val(meta), path(reads) @@ -34,7 +23,8 @@ process RSEM_CALCULATEEXPRESSION { tuple val(meta), path("${prefix}.transcript.bam"), optional:true, emit: bam_transcript script: - prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -50,14 +40,14 @@ process RSEM_CALCULATEEXPRESSION { --temporary-folder ./tmp/ \\ $strandedness \\ $paired_end \\ - $options.args \\ + $args \\ $reads \\ \$INDEX \\ $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rsem/preparereference/functions.nf b/modules/rsem/preparereference/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rsem/preparereference/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 7e671207..a5b8922a 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -1,36 +1,27 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEM_PREPAREREFERENCE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, 
publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } else { - container "quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" input: path fasta, stageAs: "rsem/*" path gtf output: - path "rsem" , emit: index - path "rsem/*transcripts.fa", emit: transcript_fasta - path "versions.yml" , emit: versions + path "rsem" , emit: index + path "*transcripts.fa", emit: transcript_fasta + path "versions.yml" , emit: versions script: - def args = options.args.tokenize() - if (args.contains('--star')) { - args.removeIf { it.contains('--star') } + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args_list = args.tokenize() + if (args_list.contains('--star')) { + args_list.removeIf { it.contains('--star') } def memory = task.memory ? "--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' """ STAR \\ @@ -40,18 +31,20 @@ process RSEM_PREPAREREFERENCE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args2 + $args2 rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . 
+ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ @@ -60,13 +53,15 @@ process RSEM_PREPAREREFERENCE { rsem-prepare-reference \\ --gtf $gtf \\ --num-threads $task.cpus \\ - $options.args \\ + $args \\ $fasta \\ rsem/genome + cp rsem/genome.transcripts.fa . + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") + "${task.process}": + rsem: \$(rsem-calculate-expression --version | sed -e "s/Current version: RSEM v//g") star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ diff --git a/modules/rseqc/bamstat/functions.nf b/modules/rseqc/bamstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/bamstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/bamstat/main.nf b/modules/rseqc/bamstat/main.nf index 64939add..1141a13f 100644 --- a/modules/rseqc/bamstat/main.nf +++ b/modules/rseqc/bamstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_BAMSTAT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -26,16 +15,17 @@ process RSEQC_BAMSTAT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bam_stat.py \\ -i $bam \\ - $options.args \\ + $args \\ > ${prefix}.bam_stat.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") + "${task.process}": + rseqc: \$(bam_stat.py --version | sed -e "s/bam_stat.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/inferexperiment/functions.nf b/modules/rseqc/inferexperiment/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/inferexperiment/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def 
saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/inferexperiment/main.nf b/modules/rseqc/inferexperiment/main.nf index c5e94943..2243c43e 100644 --- a/modules/rseqc/inferexperiment/main.nf +++ b/modules/rseqc/inferexperiment/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INFEREXPERIMENT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,17 +16,18 @@ process RSEQC_INFEREXPERIMENT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ infer_experiment.py \\ -i $bam \\ -r $bed \\ - $options.args \\ + $args \\ > ${prefix}.infer_experiment.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") + "${task.process}": + rseqc: \$(infer_experiment.py --version | sed -e "s/infer_experiment.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/innerdistance/functions.nf b/modules/rseqc/innerdistance/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/innerdistance/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/innerdistance/main.nf b/modules/rseqc/innerdistance/main.nf index 622cd5cd..425737d6 100644 --- a/modules/rseqc/innerdistance/main.nf +++ b/modules/rseqc/innerdistance/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_INNERDISTANCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -31,27 +20,28 @@ process RSEQC_INNERDISTANCE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (!meta.single_end) { """ inner_distance.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ > stdout.txt head -n 2 stdout.txt > ${prefix}.inner_distance_mean.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } else { """ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") + "${task.process}": + rseqc: \$(inner_distance.py --version | sed -e "s/inner_distance.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionannotation/functions.nf b/modules/rseqc/junctionannotation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionannotation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = 
args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionannotation/main.nf b/modules/rseqc/junctionannotation/main.nf index 1b75d915..d2562e5c 100644 --- a/modules/rseqc/junctionannotation/main.nf +++ b/modules/rseqc/junctionannotation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONANNOTATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -33,18 +22,19 @@ process RSEQC_JUNCTIONANNOTATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_annotation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args \\ + $args \\ 2> ${prefix}.junction_annotation.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") + "${task.process}": + rseqc: \$(junction_annotation.py --version | sed -e "s/junction_annotation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/junctionsaturation/functions.nf b/modules/rseqc/junctionsaturation/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/junctionsaturation/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and 
trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/junctionsaturation/main.nf b/modules/rseqc/junctionsaturation/main.nf index fa435aea..695762b5 100644 --- a/modules/rseqc/junctionsaturation/main.nf +++ b/modules/rseqc/junctionsaturation/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_JUNCTIONSATURATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -28,17 +17,18 @@ process RSEQC_JUNCTIONSATURATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ junction_saturation.py \\ -i $bam \\ -r $bed \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") + "${task.process}": + rseqc: \$(junction_saturation.py --version | sed -e "s/junction_saturation.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readdistribution/functions.nf b/modules/rseqc/readdistribution/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readdistribution/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for 
nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readdistribution/main.nf b/modules/rseqc/readdistribution/main.nf index 0c83fdf0..333193e3 100644 --- a/modules/rseqc/readdistribution/main.nf +++ b/modules/rseqc/readdistribution/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDISTRIBUTION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -27,7 +16,8 @@ process RSEQC_READDISTRIBUTION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ read_distribution.py \\ -i $bam \\ @@ -35,8 +25,8 @@ process RSEQC_READDISTRIBUTION { > ${prefix}.read_distribution.txt cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") + "${task.process}": + rseqc: \$(read_distribution.py --version | sed -e "s/read_distribution.py //g") END_VERSIONS """ } diff --git a/modules/rseqc/readduplication/functions.nf b/modules/rseqc/readduplication/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/rseqc/readduplication/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return 
paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/rseqc/readduplication/main.nf b/modules/rseqc/readduplication/main.nf index bee82682..134f2e8d 100644 --- a/modules/rseqc/readduplication/main.nf +++ b/modules/rseqc/readduplication/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process RSEQC_READDUPLICATION { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1" - } else { - container "quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/rseqc:3.0.1--py37h516909a_1' : + 'quay.io/biocontainers/rseqc:3.0.1--py37h516909a_1' }" input: tuple val(meta), path(bam) @@ -29,16 +18,17 @@ process RSEQC_READDUPLICATION { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ read_duplication.py \\ -i $bam \\ -o $prefix \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") + "${task.process}": + rseqc: \$(read_duplication.py --version | sed -e "s/read_duplication.py //g") END_VERSIONS """ } diff --git a/modules/salmon/index/functions.nf b/modules/salmon/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/index/main.nf b/modules/salmon/index/main.nf index c3fcef01..b0a2f973 100644 --- a/modules/salmon/index/main.nf +++ b/modules/salmon/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_INDEX { tag "$transcript_fasta" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: path genome_fasta @@ -27,6 +16,7 @@ process SALMON_INDEX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def get_decoy_ids = "grep '^>' $genome_fasta | cut -d ' ' -f 1 > decoys.txt" def gentrome = "gentrome.fa" if (genome_fasta.endsWith('.gz')) { @@ -43,11 +33,11 @@ process SALMON_INDEX { --threads $task.cpus \\ -t $gentrome \\ -d decoys.txt \\ - $options.args \\ + $args \\ -i salmon cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/salmon/quant/functions.nf b/modules/salmon/quant/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/salmon/quant/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/salmon/quant/main.nf b/modules/salmon/quant/main.nf index 7c2e0e17..6cae4f72 100644 --- a/modules/salmon/quant/main.nf +++ b/modules/salmon/quant/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SALMON_QUANT { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::salmon=1.5.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0" - } else { - container "quay.io/biocontainers/salmon:1.5.2--h84f40af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/salmon:1.5.2--h84f40af_0' : + 'quay.io/biocontainers/salmon:1.5.2--h84f40af_0' }" input: tuple val(meta), path(reads) @@ -31,7 +20,8 @@ process SALMON_QUANT { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def reference = "--index $index" def input_reads = meta.single_end ? 
"-r $reads" : "-1 ${reads[0]} -2 ${reads[1]}" @@ -68,12 +58,12 @@ process SALMON_QUANT { --libType=$strandedness \\ $reference \\ $input_reads \\ - $options.args \\ + $args \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(salmon --version) | sed -e "s/salmon //g") + "${task.process}": + salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") END_VERSIONS """ } diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf new file mode 100644 index 00000000..c6573283 --- /dev/null +++ b/modules/samblaster/main.nf @@ -0,0 +1,34 @@ +process SAMBLASTER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' : + 'quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' }" + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if( "$bam" == "${prefix}.bam" ) error "Input and output names are the same, use the suffix option to disambiguate" + """ + samtools view -h $args2 $bam | \\ + samblaster $args | \\ + samtools view $args3 -Sb - >${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samblaster: \$( samblaster -h 2>&1 | head -n 1 | sed 's/^samblaster: Version //' ) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samblaster/meta.yml 
b/modules/samblaster/meta.yml new file mode 100644 index 00000000..4d51f4fe --- /dev/null +++ b/modules/samblaster/meta.yml @@ -0,0 +1,53 @@ +name: samblaster +description: | + This module combines samtools and samblaster in order to use + samblaster capability to filter or tag SAM files, with the advantage + of maintaining both input and output in BAM format. + Samblaster input must contain a sequence header: for this reason it has been piped + with the "samtools view -h" command. + Additional desired arguments for samtools can be passed using: + options.args2 for the input bam file + options.args3 for the output bam file +keywords: + - sort +tools: + - samblaster: + description: | + samblaster is a fast and flexible program for marking duplicates in read-id grouped paired-end SAM files. + It can also optionally output discordant read pairs and/or split read mappings to separate SAM files, + and/or unmapped/clipped reads to a separate FASTQ file. + By default, samblaster reads SAM input from stdin and writes SAM to stdout. + homepage: None + documentation: https://github.com/GregoryFaust/samblaster + tool_dev_url: https://github.com/GregoryFaust/samblaster + doi: "10.1093/bioinformatics/btu314" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file + pattern: "*.bam" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Tagged or filtered BAM file + pattern: "*.bam" + +authors: + - "@lescai" diff --git a/modules/samtools/ampliconclip/functions.nf b/modules/samtools/ampliconclip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/ampliconclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 3da1d6fe..55a2f736 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_AMPLICONCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? "bioconda::samtools=1.13" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? 
"bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -31,14 +20,14 @@ process SAMTOOLS_AMPLICONCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def rejects = save_cliprejects ? "--rejects-file ${prefix}.cliprejects.bam" : "" def stats = save_clipstats ? "-f ${prefix}.clipstats.txt" : "" """ samtools \\ ampliconclip \\ - $options.args \\ - -@ $task.cpus \\ + $args \\ $rejects \\ $stats \\ -b $bed \\ @@ -46,8 +35,8 @@ process SAMTOOLS_AMPLICONCLIP { $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/bam2fq/functions.nf b/modules/samtools/bam2fq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/bam2fq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args 
= args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 48e3249c..689eb960 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_BAM2FQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(inputbam) @@ -27,13 +16,14 @@ process SAMTOOLS_BAM2FQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (split){ """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ -1 ${prefix}_1.fq.gz \\ -2 ${prefix}_2.fq.gz \\ @@ -42,21 +32,21 @@ process SAMTOOLS_BAM2FQ { $inputbam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ samtools \\ bam2fq \\ - $options.args \\ + $args \\ -@ $task.cpus \\ $inputbam >${prefix}_interleaved.fq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/depth/functions.nf b/modules/samtools/depth/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/depth/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = 
args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index 9c46b011..ebf029aa 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_DEPTH { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,18 +15,19 @@ process SAMTOOLS_DEPTH { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools \\ depth \\ - $options.args \\ + $args \\ -o ${prefix}.tsv \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/faidx/functions.nf b/modules/samtools/faidx/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/faidx/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module 
results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 80cedeab..d8308b03 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -1,36 +1,26 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FAIDX { tag "$fasta" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: - path fasta + tuple val(meta), path(fasta) output: - path "*.fai" , emit: fai - path "versions.yml", emit: versions + tuple val(meta), path ("*.fai") , emit: fai + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools faidx $fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/faidx/meta.yml b/modules/samtools/faidx/meta.yml index 16c0b334..bae97a39 100644 --- a/modules/samtools/faidx/meta.yml +++ b/modules/samtools/faidx/meta.yml @@ -14,11 +14,21 @@ tools: doi: 10.1093/bioinformatics/btp352 licence: ['MIT'] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - fasta: type: file description: FASTA file pattern: "*.{fa,fasta}" output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] - fai: type: file description: FASTA index file diff --git a/modules/samtools/fastq/functions.nf b/modules/samtools/fastq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fastq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 0b454360..212e804e 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -1,43 +1,33 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FASTQ { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) output: tuple val(meta), path("*.fastq.gz"), emit: fastq - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def endedness = meta.single_end ? "-0 ${prefix}.fastq.gz" : "-1 ${prefix}_1.fastq.gz -2 ${prefix}_2.fastq.gz" """ samtools fastq \\ - $options.args \\ - -@ $task.cpus \\ + $args \\ + --threads ${task.cpus-1} \\ $endedness \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/fixmate/functions.nf b/modules/samtools/fixmate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/fixmate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = 
args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index e1a766a1..8f86c1c4 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FIXMATE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::samtools=1.14" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0" - } else { - container "quay.io/biocontainers/samtools:1.14--hb421002_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,20 +15,21 @@ process SAMTOOLS_FIXMATE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if ("$bam" == "${prefix}.bam") error "Input and output names are the same, use the suffix option to disambiguate!" """ samtools \\ fixmate \\ - $options.args \\ - -@ $task.cpus \\ + $args \\ + --threads ${task.cpus-1} \\ $bam \\ ${prefix}.bam \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/flagstat/functions.nf b/modules/samtools/flagstat/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/flagstat/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - 
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index f9115c6b..03721d0b 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_FLAGSTAT { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - 
conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_FLAGSTAT { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - samtools flagstat $bam > ${bam}.flagstat + samtools flagstat --threads ${task.cpus-1} $bam > ${bam}.flagstat cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/idxstats/functions.nf b/modules/samtools/idxstats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/idxstats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) 
{ - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index b005088a..cd068679 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_IDXSTATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,11 +15,12 @@ process SAMTOOLS_IDXSTATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ samtools idxstats $bam > ${bam}.idxstats cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/index/functions.nf b/modules/samtools/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index febbc11c..db025a8f 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -1,37 +1,29 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_INDEX { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(input) output: - tuple val(meta), path("*.bai"), optional:true, emit: bai - tuple val(meta), path("*.csi"), optional:true, emit: csi - path "versions.yml" , emit: versions + tuple val(meta), path("*.bai") , optional:true, emit: bai + tuple val(meta), path("*.csi") , optional:true, emit: csi + tuple val(meta), path("*.crai"), optional:true, emit: crai + path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - samtools index $options.args $bam + samtools index -@ ${task.cpus-1} $args $input + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/index/meta.yml b/modules/samtools/index/meta.yml index 988e8f53..0905b3cd 100644 --- a/modules/samtools/index/meta.yml +++ b/modules/samtools/index/meta.yml @@ -35,6 +35,10 @@ output: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" + - crai: + type: file + description: BAM/CRAM/SAM index file + pattern: "*.{bai,crai,sai}" - csi: type: file description: CSI index file @@ -46,3 +50,4 @@ output: authors: - "@drpatelh" - "@ewels" + - "@maxulysse" diff 
--git a/modules/samtools/merge/functions.nf b/modules/samtools/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index fefb423b..fcfcf61f 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MERGE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input_files) @@ -28,14 +17,16 @@ process SAMTOOLS_MERGE { path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" def file_type = input_files[0].getExtension() def reference = fasta ? "--reference ${fasta}" : "" """ - samtools merge ${reference} ${prefix}.${file_type} $input_files + samtools merge --threads ${task.cpus-1} $args ${reference} ${prefix}.${file_type} $input_files + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/mpileup/functions.nf b/modules/samtools/mpileup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/mpileup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - 
options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 9e120526..c40f46d1 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -27,16 +16,17 @@ process SAMTOOLS_MPILEUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ samtools mpileup \\ --fasta-ref $fasta \\ --output ${prefix}.mpileup \\ - $options.args \\ + $args \\ $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/sort/functions.nf b/modules/samtools/sort/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/sort/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index b30f6f45..0c2cf25e 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_SORT { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(bam) @@ -26,12 +15,13 @@ process SAMTOOLS_SORT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - samtools sort $options.args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam + samtools sort $args -@ $task.cpus -o ${prefix}.bam -T $prefix $bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/stats/functions.nf b/modules/samtools/stats/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/stats/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default 
values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index aab43410..83c87002 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_STATS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input), path(input_index) @@ -27,12 +16,14 @@ process SAMTOOLS_STATS { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def reference = fasta ? 
"--reference ${fasta}" : "" """ - samtools stats ${reference} ${input} > ${input}.stats + samtools stats --threads ${task.cpus-1} ${reference} ${input} > ${input}.stats + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/samtools/view/functions.nf b/modules/samtools/view/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/samtools/view/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) 
{ - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index b7a047ee..619b84dc 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -1,41 +1,32 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SAMTOOLS_VIEW { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
'bioconda::samtools=1.13' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/samtools:1.13--h8c37831_0" - } else { - container "quay.io/biocontainers/samtools:1.13--h8c37831_0" - } + conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : + 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" input: tuple val(meta), path(input) path fasta output: - tuple val(meta), path("*.bam") , optional: true, emit: bam - tuple val(meta), path("*.cram"), optional: true, emit: cram - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam") , emit: bam , optional: true + tuple val(meta), path("*.cram"), emit: cram, optional: true + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--reference ${fasta} -C" : "" def file_type = input.getExtension() """ - samtools view ${reference} $options.args $input > ${prefix}.${file_type} + samtools view --threads ${task.cpus-1} ${reference} $args $input > ${prefix}.${file_type} + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/scoary/main.nf b/modules/scoary/main.nf new file mode 100644 index 00000000..ca33041d --- /dev/null +++ b/modules/scoary/main.nf @@ -0,0 +1,35 @@ +process SCOARY { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::scoary=1.6.16" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/scoary:1.6.16--py_2' : + 'quay.io/biocontainers/scoary:1.6.16--py_2' }" + + input: + tuple val(meta), path(genes), path(traits) + path(tree) + + output: + tuple val(meta), path("*.csv"), emit: csv + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def newick_tree = tree ? "-n ${tree}" : "" + """ + scoary \\ + $args \\ + --no-time \\ + --threads $task.cpus \\ + --traits $traits \\ + --genes $genes + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + scoary: \$( scoary --version 2>&1 ) + END_VERSIONS + """ +} diff --git a/modules/scoary/meta.yml b/modules/scoary/meta.yml new file mode 100644 index 00000000..e8e8515e --- /dev/null +++ b/modules/scoary/meta.yml @@ -0,0 +1,51 @@ +name: scoary +description: Use pangenome outputs for GWAS +keywords: + - gwas + - pangenome + - prokaryote +tools: + - scoary: + description: Microbial pan-GWAS using the output from Roary + homepage: https://github.com/AdmiralenOla/Scoary + documentation: https://github.com/AdmiralenOla/Scoary + tool_dev_url: https://github.com/AdmiralenOla/Scoary + doi: "10.1186/s13059-016-1108-8" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - genes: + type: file + description: A presence/absence matrix of genes in the pan-genome + pattern: "*.csv" + - traits: + type: file + description: A CSV file containing trait information per-sample + pattern: "*.csv" + - tree: + type: file + description: A Newick formtted tree for phylogenetic analyses + pattern: "*.{dnd,nwk,treefile}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - csv: + type: file + description: Gene associations in a CSV file per trait + pattern: "*.csv" + +authors: + - "@rpetit3" diff --git a/modules/seacr/callpeak/functions.nf b/modules/seacr/callpeak/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seacr/callpeak/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') 
&& !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seacr/callpeak/main.nf b/modules/seacr/callpeak/main.nf index 97bf1c0b..12b9205f 100644 --- a/modules/seacr/callpeak/main.nf +++ b/modules/seacr/callpeak/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.3' +def VERSION = '1.3' // Version information not provided by tool on CLI process SEACR_CALLPEAK { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::seacr=1.3 conda-forge::r-base=4.0.2 bioconda::bedtools=2.30.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0" - } else { - container 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' : + 'quay.io/biocontainers/mulled-v2-03bfeb32fe80910c231f630d4262b83677c8c0f4:f4bb19b68e66de27e4c64306f951d5ff11919931-0' }" input: tuple val(meta), path(bedgraph), path(ctrlbedgraph) @@ -29,17 +18,19 @@ process SEACR_CALLPEAK { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def function_switch = ctrlbedgraph ? 
"$ctrlbedgraph" : "$threshold" """ SEACR_1.3.sh \\ $bedgraph \\ $function_switch \\ - $options.args \\ + $args \\ $prefix + cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seacr: $VERSION bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') END_VERSIONS diff --git a/modules/seqkit/split2/functions.nf b/modules/seqkit/split2/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqkit/split2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - 
def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqkit/split2/main.nf b/modules/seqkit/split2/main.nf index 80f55bb6..5bed1dae 100644 --- a/modules/seqkit/split2/main.nf +++ b/modules/seqkit/split2/main.nf @@ -1,59 +1,49 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQKIT_SPLIT2 { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 
'bioconda::seqkit=0.16.1' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqkit:0.16.1--h9ee0642_0" - } else { - container "quay.io/biocontainers/seqkit:0.16.1--h9ee0642_0" - } + conda (params.enable_conda ? 'bioconda::seqkit=2.1.0' : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqkit:2.1.0--h9ee0642_0' : + 'quay.io/biocontainers/seqkit:2.1.0--h9ee0642_0' }" input: tuple val(meta), path(reads) output: - tuple val(meta), path("*${prefix}/*.gz"), emit: reads - path "versions.yml" , emit: versions + tuple val(meta), path("**/*.gz"), emit: reads + path "versions.yml" , emit: versions script: - prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if(meta.single_end){ """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ - -1 $reads \\ - --out-dir $prefix + $reads \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqkit \\ split2 \\ - $options.args \\ + $args \\ --threads $task.cpus \\ - -1 ${reads[0]} \\ - -2 ${reads[1]} \\ - --out-dir $prefix + --read1 ${reads[0]} \\ + --read2 ${reads[1]} \\ + --out-dir ${prefix} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqkit: \$(echo \$(seqkit 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqsero2/functions.nf b/modules/seqsero2/functions.nf 
deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqsero2/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? 
ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqsero2/main.nf b/modules/seqsero2/main.nf index 3748a6e4..0a7aa6ad 100644 --- a/modules/seqsero2/main.nf +++ b/modules/seqsero2/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQSERO2 { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqsero2=1.2.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0" - } else { - container "quay.io/biocontainers/seqsero2:1.2.1--py_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/seqsero2:1.2.1--py_0' : + 'quay.io/biocontainers/seqsero2:1.2.1--py_0' }" input: tuple val(meta), path(seqs) @@ -28,18 +17,19 @@ process SEQSERO2 { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ SeqSero2_package.py \\ - $options.args \\ + $args \\ -d results/ \\ -n $prefix \\ -p $task.cpus \\ -i $seqs cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) + "${task.process}": + seqsero2: \$( echo \$( SeqSero2_package.py --version 2>&1) | sed 's/^.*SeqSero2_package.py //' ) END_VERSIONS """ } diff --git a/modules/seqtk/mergepe/functions.nf b/modules/seqtk/mergepe/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/mergepe/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a 
path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/mergepe/main.nf b/modules/seqtk/mergepe/main.nf index fb8eb382..299c9ea4 100644 --- a/modules/seqtk/mergepe/main.nf +++ b/modules/seqtk/mergepe/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_MERGEPE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -26,27 +15,28 @@ process SEQTK_MERGEPE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ ln -s ${reads} ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { """ seqtk \\ mergepe \\ - $options.args \\ + $args \\ ${reads} \\ | gzip -n >> ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/sample/functions.nf b/modules/seqtk/sample/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/sample/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a 
path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/sample/main.nf b/modules/seqtk/sample/main.nf index 3b039fb9..96e08fd4 100644 --- a/modules/seqtk/sample/main.nf +++ b/modules/seqtk/sample/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SAMPLE { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: tuple val(meta), path(reads) @@ -27,43 +16,44 @@ process SEQTK_SAMPLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ seqtk \\ sample \\ - $options.args \\ + $args \\ $reads \\ $sample_size \\ | gzip --no-name > ${prefix}.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } else { - if (!(options.args ==~ /.*-s[0-9]+.*/)) { - options.args = options.args + " -s100" + if (!(args ==~ /.*-s[0-9]+.*/)) { + args += " -s100" } """ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[0]} \\ $sample_size \\ | gzip --no-name > ${prefix}_1.fastq.gz \\ seqtk \\ sample \\ - $options.args \\ + $args \\ ${reads[1]} \\ $sample_size \\ | gzip --no-name > ${prefix}_2.fastq.gz \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqtk/subseq/functions.nf b/modules/seqtk/subseq/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqtk/subseq/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map 
options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqtk/subseq/main.nf b/modules/seqtk/subseq/main.nf index df8783de..abfe4faa 100644 --- a/modules/seqtk/subseq/main.nf +++ b/modules/seqtk/subseq/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQTK_SUBSEQ { tag '$sequences' label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::seqtk=1.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3" - } else { - container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3' : + 'quay.io/biocontainers/seqtk:1.3--h5bf99c6_3' }" input: path sequences @@ -27,7 +16,8 @@ process SEQTK_SUBSEQ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ?: '' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: '' def ext = "fa" if ("$sequences" ==~ /.+\.fq|.+\.fq.gz|.+\.fastq|.+\.fastq.gz/) { ext = "fq" @@ -35,14 +25,14 @@ process SEQTK_SUBSEQ { """ seqtk \\ subseq \\ - $options.args \\ + $args \\ $sequences \\ $filter_list | \\ gzip --no-name > ${sequences}${prefix}.${ext}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/bam2seqz/functions.nf b/modules/sequenzautils/bam2seqz/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/bam2seqz/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return 
options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/bam2seqz/main.nf b/modules/sequenzautils/bam2seqz/main.nf index 61ca70c6..ce9d1962 100644 --- a/modules/sequenzautils/bam2seqz/main.nf +++ b/modules/sequenzautils/bam2seqz/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_BAM2SEQZ { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(normalbam), path(tumourbam) @@ -28,11 +17,12 @@ process SEQUENZAUTILS_BAM2SEQZ { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ bam2seqz \\ - $options.args \\ + $args \\ -n $normalbam \\ -t $tumourbam \\ --fasta $fasta \\ @@ -40,8 +30,8 @@ process SEQUENZAUTILS_BAM2SEQZ { -o ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/sequenzautils/gcwiggle/functions.nf b/modules/sequenzautils/gcwiggle/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/sequenzautils/gcwiggle/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sequenzautils/gcwiggle/main.nf b/modules/sequenzautils/gcwiggle/main.nf index c952bb70..a6fcb559 100644 --- a/modules/sequenzautils/gcwiggle/main.nf +++ b/modules/sequenzautils/gcwiggle/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SEQUENZAUTILS_GCWIGGLE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) 
} conda (params.enable_conda ? "bioconda::sequenza-utils=3.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2" - } else { - container "quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sequenza-utils:3.0.0--py38h6ed170a_2' : + 'quay.io/biocontainers/sequenza-utils:3.0.0--py38h6ed170a_2' }" input: tuple val(meta), path(fasta) @@ -26,17 +15,18 @@ process SEQUENZAUTILS_GCWIGGLE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ sequenza-utils \\ gc_wiggle \\ - $options.args \\ + $args \\ --fasta $fasta \\ -o ${prefix}.wig.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') + "${task.process}": + sequenzautils: \$(echo \$(sequenza-utils 2>&1) | sed 's/^.*is version //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/seqwish/induce/functions.nf b/modules/seqwish/induce/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/seqwish/induce/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a 
Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/seqwish/induce/main.nf b/modules/seqwish/induce/main.nf index aaabce51..6d6b33e7 100644 --- a/modules/seqwish/induce/main.nf +++ b/modules/seqwish/induce/main.nf @@ -1,24 +1,14 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' -params.options = [:] -options = initOptions(params.options) - -def VERSION = '0.7.1' +def VERSION = '0.7.2' // Version information not provided by tool on CLI process SEQWISH_INDUCE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } - conda (params.enable_conda ? 'bioconda::seqwish=0.7.1' : null) + conda (params.enable_conda ? 'bioconda::seqwish=0.7.2' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/seqwish:0.7.1--h2e03b76_0" - } else { - container "quay.io/biocontainers/seqwish:0.7.1--h2e03b76_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/seqwish:0.7.2--h2e03b76_0' : + 'quay.io/biocontainers/seqwish:0.7.2--h2e03b76_0' }" input: tuple val(meta), path(paf), path(fasta) @@ -27,20 +17,20 @@ process SEQWISH_INDUCE { tuple val(meta), path("*.gfa"), emit: gfa path "versions.yml" , emit: versions - script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ seqwish \\ --threads $task.cpus \\ --paf-alns=$paf \\ --seqs=$fasta \\ --gfa=${prefix}.gfa \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + seqwish: $VERSION END_VERSIONS """ } diff --git a/modules/shovill/functions.nf b/modules/shovill/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/shovill/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { 
item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/shovill/main.nf b/modules/shovill/main.nf index 48425f9f..1a56df27 100644 --- a/modules/shovill/main.nf +++ b/modules/shovill/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SHOVILL { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, 
publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::shovill=1.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/shovill:1.1.0--0" - } else { - container "quay.io/biocontainers/shovill:1.1.0--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/shovill:1.1.0--0' : + 'quay.io/biocontainers/shovill:1.1.0--0' }" input: tuple val(meta), path(reads) @@ -30,20 +19,21 @@ process SHOVILL { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def memory = task.memory.toGiga() """ shovill \\ --R1 ${reads[0]} \\ --R2 ${reads[1]} \\ - $options.args \\ + $args \\ --cpus $task.cpus \\ --ram $memory \\ --outdir ./ \\ --force cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') + "${task.process}": + shovill: \$(echo \$(shovill --version 2>&1) | sed 's/^.*shovill //') END_VERSIONS """ } diff --git a/modules/snpdists/functions.nf b/modules/snpdists/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpdists/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: 
'' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpdists/main.nf b/modules/snpdists/main.nf index 506a922a..c8d61161 100644 --- a/modules/snpdists/main.nf +++ b/modules/snpdists/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPDISTS { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::snp-dists=0.8.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0" - } else { - container "quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-dists:0.8.2--h5bf99c6_0' : + 'quay.io/biocontainers/snp-dists:0.8.2--h5bf99c6_0' }" input: tuple val(meta), path(alignment) @@ -26,15 +15,16 @@ process SNPDISTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ snp-dists \\ - $options.args \\ + $args \\ $alignment > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') + "${task.process}": + snpdists: \$(snp-dists -v 2>&1 | sed 's/snp-dists //;') END_VERSIONS """ } diff --git a/modules/snpeff/functions.nf b/modules/snpeff/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpeff/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpeff/main.nf b/modules/snpeff/main.nf index 3a1f6a52..6248fee3 100644 --- a/modules/snpeff/main.nf +++ b/modules/snpeff/main.nf @@ -1,27 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) -params.use_cache = false -params.snpeff_tag = "" - process SNPEFF { + tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::snpeff=5.0" : null) - if (params.use_cache) { - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1" - } else { - container "quay.io/biocontainers/snpeff:5.0--hdfd78af_1" - } - } else { - container "nfcore/snpeff:${params.snpeff_tag}" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snpeff:5.0--hdfd78af_1' : + 'quay.io/biocontainers/snpeff:5.0--hdfd78af_1' }" input: tuple val(meta), path(vcf) @@ -34,26 +18,28 @@ process SNPEFF { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def avail_mem = 6 if (!task.memory) { log.info '[snpEff] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - cache = params.use_cache ? "-dataDir \${PWD}/${snpeff_cache}" : "" + def prefix = task.ext.prefix ?: "${meta.id}" + def cache_command = cache ? 
"-dataDir \${PWD}/${cache}" : "" """ - snpEff -Xmx${avail_mem}g \\ + snpEff \\ + -Xmx${avail_mem}g \\ $db \\ - $options.args \\ + $args \\ -csvStats ${prefix}.csv \\ - $cache \\ + $cache_command \\ $vcf \\ > ${prefix}.ann.vcf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') + "${task.process}": + snpeff: \$(echo \$(snpEff -version 2>&1) | cut -f 2 -d ' ') END_VERSIONS """ } diff --git a/modules/snpsites/functions.nf b/modules/snpsites/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/snpsites/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def 
ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/snpsites/main.nf b/modules/snpsites/main.nf index 543ee01c..60e694ac 100644 --- a/modules/snpsites/main.nf +++ b/modules/snpsites/main.nf @@ -1,21 +1,10 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SNPSITES { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::snp-sites=2.5.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0" - } else { - container "quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snp-sites:2.5.1--hed695b0_0' : + 'quay.io/biocontainers/snp-sites:2.5.1--hed695b0_0' }" input: path alignment @@ -27,10 +16,11 @@ process SNPSITES { env CONSTANT_SITES, emit: constant_sites_string script: + def args = task.ext.args ?: '' """ snp-sites \\ $alignment \\ - $options.args \\ + $args \\ > filtered_alignment.fas echo \$(snp-sites -C $alignment) > constant.sites.txt @@ -38,8 +28,8 @@ process SNPSITES { CONSTANT_SITES=\$(cat constant.sites.txt) cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') + "${task.process}": + snpsites: \$(snp-sites -V 2>&1 | sed 's/snp-sites //') END_VERSIONS """ } diff --git a/modules/sortmerna/functions.nf b/modules/sortmerna/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sortmerna/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 
?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sortmerna/main.nf b/modules/sortmerna/main.nf index 9602bb53..5c0950d8 100644 --- a/modules/sortmerna/main.nf +++ b/modules/sortmerna/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SORTMERNA { tag "$meta.id" label "process_high" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::sortmerna=4.3.4" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0" - } else { - container "quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/sortmerna:4.3.4--h9ee0642_0' : + 'quay.io/biocontainers/sortmerna:4.3.4--h9ee0642_0' }" input: tuple val(meta), path(reads) @@ -28,7 +17,8 @@ process SORTMERNA { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ sortmerna \\ @@ -38,14 +28,14 @@ process SORTMERNA { --workdir . 
\\ --aligned rRNA_reads \\ --other non_rRNA_reads \\ - $options.args + $args mv non_rRNA_reads.fq.gz ${prefix}.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } else { @@ -60,15 +50,15 @@ process SORTMERNA { --other non_rRNA_reads \\ --paired_in \\ --out2 \\ - $options.args + $args mv non_rRNA_reads_fwd.fq.gz ${prefix}_1.fastq.gz mv non_rRNA_reads_rev.fq.gz ${prefix}_2.fastq.gz mv rRNA_reads.log ${prefix}.sortmerna.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') + "${task.process}": + sortmerna: \$(echo \$(sortmerna --version 2>&1) | sed 's/^.*SortMeRNA version //; s/ Build Date.*\$//') END_VERSIONS """ } diff --git a/modules/spades/functions.nf b/modules/spades/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spades/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spades/main.nf b/modules/spades/main.nf index c21066e2..ba690d35 100644 --- a/modules/spades/main.nf +++ b/modules/spades/main.nf @@ -1,69 +1,70 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPADES { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::spades=3.15.3' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0" - } else { - container "quay.io/biocontainers/spades:3.15.3--h95f258a_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/spades:3.15.3--h95f258a_0' : + 'quay.io/biocontainers/spades:3.15.3--h95f258a_0' }" input: - tuple val(meta), path(reads) + tuple val(meta), path(illumina), path(pacbio), path(nanopore) path hmm output: - tuple val(meta), path('*.scaffolds.fa') , optional:true, emit: scaffolds - tuple val(meta), path('*.contigs.fa') , optional:true, emit: contigs - tuple val(meta), path('*.transcripts.fa') , optional:true, emit: transcripts - tuple val(meta), path('*.gene_clusters.fa'), optional:true, emit: gene_clusters - tuple val(meta), path('*.assembly.gfa') , optional:true, emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz') , optional:true, emit: scaffolds + tuple val(meta), path('*.contigs.fa.gz') , optional:true, emit: contigs + tuple val(meta), path('*.transcripts.fa.gz') , optional:true, emit: transcripts + tuple val(meta), path('*.gene_clusters.fa.gz'), optional:true, emit: gene_clusters + tuple val(meta), path('*.assembly.gfa.gz') , optional:true, emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" - def custom_hmms = params.spades_hmm ? "--custom-hmms $hmm" : "" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def maxmem = task.memory.toGiga() + def illumina_reads = illumina ? ( meta.single_end ? "-s $illumina" : "-1 ${illumina[0]} -2 ${illumina[1]}" ) : "" + def pacbio_reads = pacbio ? "--pacbio $pacbio" : "" + def nanopore_reads = nanopore ? "--nanopore $nanopore" : "" + def custom_hmms = hmm ? 
"--custom-hmms $hmm" : "" """ spades.py \\ - $options.args \\ + $args \\ --threads $task.cpus \\ + --memory $maxmem \\ $custom_hmms \\ - $input_reads \\ + $illumina_reads \\ + $pacbio_reads \\ + $nanopore_reads \\ -o ./ mv spades.log ${prefix}.spades.log if [ -f scaffolds.fasta ]; then mv scaffolds.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa fi if [ -f contigs.fasta ]; then mv contigs.fasta ${prefix}.contigs.fa + gzip -n ${prefix}.contigs.fa fi if [ -f transcripts.fasta ]; then mv transcripts.fasta ${prefix}.transcripts.fa + gzip -n ${prefix}.transcripts.fa fi if [ -f assembly_graph_with_scaffolds.gfa ]; then mv assembly_graph_with_scaffolds.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa fi if [ -f gene_clusters.fasta ]; then mv gene_clusters.fasta ${prefix}.gene_clusters.fa + gzip -n ${prefix}.gene_clusters.fa fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') + "${task.process}": + spades: \$(spades.py --version 2>&1 | sed 's/^.*SPAdes genome assembler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/spades/meta.yml b/modules/spades/meta.yml index 3d5943ae..b6878d3d 100644 --- a/modules/spades/meta.yml +++ b/modules/spades/meta.yml @@ -20,11 +20,20 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - illumina: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, - respectively. + List of input FastQ (Illumina or PacBio CCS reads) files + of size 1 and 2 for single-end and paired-end data, + respectively. This input data type is required. + - pacbio: + type: file + description: | + List of input PacBio CLR FastQ files of size 1. + - nanopore: + type: file + description: | + List of input FastQ files of size 1, originating from Oxford Nanopore technology. 
- hmm: type: file description: @@ -39,26 +48,32 @@ output: type: file description: | Fasta file containing scaffolds + pattern: "*.fa.gz" - contigs: type: file description: | Fasta file containing contigs + pattern: "*.fa.gz" - transcripts: type: file description: | Fasta file containing transcripts + pattern: "*.fa.gz" - gene_clusters: type: file description: | Fasta file containing gene_clusters + pattern: "*.fa.gz" - gfa: type: file description: | gfa file containing assembly + pattern: "*.gfa.gz" - log: type: file description: | Spades log file + pattern: "*.log" - versions: type: file description: File containing software versions @@ -67,3 +82,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/modules/spatyper/functions.nf b/modules/spatyper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/spatyper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove 
empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/spatyper/main.nf b/modules/spatyper/main.nf index ce320bfc..e0ba8d13 100644 --- a/modules/spatyper/main.nf +++ b/modules/spatyper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SPATYPER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::spatyper=0.3.3" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3" - } else { - container "quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/spatyper%3A0.3.3--pyhdfd78af_3' : + 'quay.io/biocontainers/spatyper:0.3.3--pyhdfd78af_3' }" input: tuple val(meta), path(fasta) @@ -28,19 +17,19 @@ process SPATYPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def input_args = repeats && repeat_order ? "-r ${repeats} -o ${repeat_order}" : "" """ - env spaTyper \\ - $options.args \\ + $args \\ $input_args \\ --fasta $fasta \\ --output ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) + "${task.process}": + spatyper: \$( echo \$(spaTyper --version 2>&1) | sed 's/^.*spaTyper //' ) END_VERSIONS """ } diff --git a/modules/sratools/fasterqdump/functions.nf b/modules/sratools/fasterqdump/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/fasterqdump/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to 
generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/fasterqdump/main.nf b/modules/sratools/fasterqdump/main.nf index 08ef9045..73e3673d 100644 --- a/modules/sratools/fasterqdump/main.nf +++ b/modules/sratools/fasterqdump/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_FASTERQDUMP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0 conda-forge::pigz=2.6' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } else { - container 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : + 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" input: tuple val(meta), path(sra) @@ -26,6 +15,8 @@ process SRATOOLS_FASTERQDUMP { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" // Paired-end data extracted by fasterq-dump (--split-3 the default) always creates // *_1.fastq *_2.fastq files but sometimes also an additional *.fastq file @@ -39,19 +30,19 @@ process SRATOOLS_FASTERQDUMP { fi fasterq-dump \\ - ${options.args} \\ + $args \\ --threads $task.cpus \\ ${sra.name} pigz \\ - ${options.args2} \\ + $args2 \\ --no-name \\ --processes $task.cpus \\ *.fastq cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ diff --git a/modules/sratools/prefetch/functions.nf b/modules/sratools/prefetch/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/sratools/prefetch/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available 
options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/sratools/prefetch/main.nf b/modules/sratools/prefetch/main.nf index 207d1e10..1e1eb802 100644 --- a/modules/sratools/prefetch/main.nf +++ b/modules/sratools/prefetch/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SRATOOLS_PREFETCH { tag "$id" label 'process_low' label 'error_retry' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::sra-tools=2.11.0' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' - } else { - container 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/sra-tools:2.11.0--pl5262h314213e_0' : + 'quay.io/biocontainers/sra-tools:2.11.0--pl5262h314213e_0' }" input: tuple val(meta), val(id) @@ -27,6 +16,7 @@ process SRATOOLS_PREFETCH { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' def config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" """ eval "\$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" @@ -36,15 +26,15 @@ process SRATOOLS_PREFETCH { fi prefetch \\ - $options.args \\ + $args \\ --progress \\ $id vdb-validate $id cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') + "${task.process}": + sratools: \$(prefetch --version 2>&1 | grep -Eo '[0-9.]+') END_VERSIONS """ } diff --git a/modules/staphopiasccmec/functions.nf b/modules/staphopiasccmec/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/staphopiasccmec/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and 
join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/staphopiasccmec/main.nf b/modules/staphopiasccmec/main.nf index 08def401..dbb61a27 100644 --- a/modules/staphopiasccmec/main.nf +++ b/modules/staphopiasccmec/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAPHOPIASCCMEC { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::staphopia-sccmec=1.0.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0" - } else { - container "quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/staphopia-sccmec:1.0.0--hdfd78af_0' : + 'quay.io/biocontainers/staphopia-sccmec:1.0.0--hdfd78af_0' }" input: tuple val(meta), path(fasta) @@ -26,13 +15,14 @@ process STAPHOPIASCCMEC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - staphopia-sccmec --assembly $fasta $options.args > ${prefix}.tsv + staphopia-sccmec --assembly $fasta $args > ${prefix}.tsv cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') + "${task.process}": + staphopiasccmec: \$(staphopia-sccmec --version 2>&1 | sed 's/^.*staphopia-sccmec //') END_VERSIONS """ } diff --git a/modules/star/align/functions.nf b/modules/star/align/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/align/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// 
Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index e0ccba8c..9725496f 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -1,28 +1,20 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_ALIGN { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? 
'bioconda::star=2.7.9a' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' - } else { - container 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' : + 'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' }" input: tuple val(meta), path(reads) path index path gtf + val star_ignore_sjdbgtf + val seq_platform + val seq_center output: tuple val(meta), path('*d.out.bam') , emit: bam @@ -39,12 +31,13 @@ process STAR_ALIGN { tuple val(meta), path('*.out.junction') , optional:true, emit: junction script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def ignore_gtf = params.star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" - def seq_platform = params.seq_platform ? "'PL:$params.seq_platform'" : "" - def seq_center = params.seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$params.seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " - def out_sam_type = (options.args.contains('--outSAMtype')) ? '' : '--outSAMtype BAM Unsorted' - def mv_unsorted_bam = (options.args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def ignore_gtf = star_ignore_sjdbgtf ? '' : "--sjdbGTFfile $gtf" + def seq_platform = seq_platform ? "'PL:$seq_platform'" : "" + def seq_center = seq_center ? "--outSAMattrRGline ID:$prefix 'CN:$seq_center' 'SM:$prefix' $seq_platform " : "--outSAMattrRGline ID:$prefix 'SM:$prefix' $seq_platform " + def out_sam_type = (args.contains('--outSAMtype')) ? 
'' : '--outSAMtype BAM Unsorted' + def mv_unsorted_bam = (args.contains('--outSAMtype BAM Unsorted SortedByCoordinate')) ? "mv ${prefix}.Aligned.out.bam ${prefix}.Aligned.unsort.out.bam" : '' """ STAR \\ --genomeDir $index \\ @@ -54,7 +47,7 @@ process STAR_ALIGN { $out_sam_type \\ $ignore_gtf \\ $seq_center \\ - $options.args + $args $mv_unsorted_bam @@ -68,8 +61,8 @@ process STAR_ALIGN { fi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") END_VERSIONS """ } diff --git a/modules/star/genomegenerate/functions.nf b/modules/star/genomegenerate/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/star/genomegenerate/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index c932fafe..ad32c0dd 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -1,23 +1,12 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STAR_GENOMEGENERATE { tag "$fasta" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS 
iGenomes. conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.13 conda-forge::gawk=5.1.0" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } else { - container "quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' : + 'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' }" input: path fasta @@ -28,9 +17,10 @@ process STAR_GENOMEGENERATE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' + def args_list = args.tokenize() def memory = task.memory ? 
"--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : '' - def args = options.args.tokenize() - if (args.contains('--genomeSAindexNbases')) { + if (args_list.contains('--genomeSAindexNbases')) { """ mkdir star STAR \\ @@ -40,11 +30,11 @@ process STAR_GENOMEGENERATE { --sjdbGTFfile $gtf \\ --runThreadN $task.cpus \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS @@ -63,11 +53,11 @@ process STAR_GENOMEGENERATE { --runThreadN $task.cpus \\ --genomeSAindexNbases \$NUM_BASES \\ $memory \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(STAR --version | sed -e "s/STAR_//g") + "${task.process}": + star: \$(STAR --version | sed -e "s/STAR_//g") samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//') END_VERSIONS diff --git a/modules/strelka/germline/functions.nf b/modules/strelka/germline/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/germline/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy 
Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/germline/main.nf b/modules/strelka/germline/main.nf index 5e913c40..324be6df 100644 --- a/modules/strelka/germline/main.nf +++ b/modules/strelka/germline/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_GERMLINE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--0" - } else { - container "quay.io/biocontainers/strelka:2.9.10--0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--0' : + 'quay.io/biocontainers/strelka:2.9.10--0' }" input: tuple val(meta), path(input), path(input_index) @@ -25,7 +14,6 @@ process STRELKA_GERMLINE { path target_bed path target_bed_tbi - output: tuple val(meta), path("*variants.vcf.gz") , emit: vcf tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi @@ -34,14 +22,15 @@ process STRELKA_GERMLINE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def regions = target_bed ? "--exome --callRegions ${target_bed}" : "" """ configureStrelkaGermlineWorkflow.py \\ --bam $input \\ --referenceFasta $fasta \\ $regions \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -51,8 +40,8 @@ process STRELKA_GERMLINE { mv strelka/results/variants/variants.vcf.gz.tbi ${prefix}.variants.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaGermlineWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaGermlineWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/strelka/somatic/functions.nf b/modules/strelka/somatic/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/strelka/somatic/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options 
for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/strelka/somatic/main.nf b/modules/strelka/somatic/main.nf index 633b0a2c..a9766d01 100644 --- a/modules/strelka/somatic/main.nf +++ b/modules/strelka/somatic/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRELKA_SOMATIC { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::strelka=2.9.10" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1" - } else { - container "quay.io/biocontainers/strelka:2.9.10--h9ee0642_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/strelka:2.9.10--h9ee0642_1' : + 'quay.io/biocontainers/strelka:2.9.10--h9ee0642_1' }" input: tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi) @@ -33,7 +22,8 @@ process STRELKA_SOMATIC { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : "" def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : "" """ @@ -43,7 +33,7 @@ process STRELKA_SOMATIC { --referenceFasta $fasta \\ $options_target_bed \\ $options_manta \\ - $options.args \\ + $args \\ --runDir strelka python strelka/runWorkflow.py -m local -j $task.cpus @@ -54,8 +44,8 @@ process STRELKA_SOMATIC { mv strelka/results/variants/somatic.snvs.vcf.gz.tbi ${prefix}.somatic_snvs.vcf.gz.tbi cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( configureStrelkaSomaticWorkflow.py --version ) + "${task.process}": + strelka: \$( configureStrelkaSomaticWorkflow.py --version ) END_VERSIONS """ } diff --git a/modules/stringtie/merge/functions.nf b/modules/stringtie/merge/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/merge/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix 
?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index 371533bb..756dc6ec 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE_MERGE { label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } // Note: 2.7X indices incompatible with AWS iGenomes. conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: path stringtie_gtf @@ -27,6 +16,7 @@ process STRINGTIE_MERGE { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ stringtie \\ --merge $stringtie_gtf \\ @@ -34,8 +24,8 @@ process STRINGTIE_MERGE { -o stringtie.merged.gtf cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/stringtie/stringtie/functions.nf b/modules/stringtie/stringtie/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/stringtie/stringtie/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } 
// Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index 3579e47c..9d62a966 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process STRINGTIE { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::stringtie=2.1.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0" - } else { - container "quay.io/biocontainers/stringtie:2.1.7--h978d192_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : + 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" input: tuple val(meta), path(bam) @@ -30,7 +19,8 @@ process STRINGTIE { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def strandedness = '' if (meta.strandedness == 'forward') { @@ -48,11 +38,11 @@ process STRINGTIE { -C ${prefix}.coverage.gtf \\ -b ${prefix}.ballgown \\ -p $task.cpus \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(stringtie --version 2>&1) + "${task.process}": + stringtie: \$(stringtie --version 2>&1) END_VERSIONS """ } diff --git a/modules/subread/featurecounts/functions.nf b/modules/subread/featurecounts/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/subread/featurecounts/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/subread/featurecounts/main.nf b/modules/subread/featurecounts/main.nf index 0a0285db..53eb279e 100644 --- a/modules/subread/featurecounts/main.nf +++ b/modules/subread/featurecounts/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process SUBREAD_FEATURECOUNTS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::subread=2.0.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0" - } else { - container "quay.io/biocontainers/subread:2.0.1--hed695b0_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0' : + 'quay.io/biocontainers/subread:2.0.1--hed695b0_0' }" input: tuple val(meta), path(bams), path(annotation) @@ -27,7 +16,8 @@ process SUBREAD_FEATURECOUNTS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def paired_end = meta.single_end ? '' : '-p' def strandedness = 0 @@ -38,7 +28,7 @@ process SUBREAD_FEATURECOUNTS { } """ featureCounts \\ - $options.args \\ + $args \\ $paired_end \\ -T $task.cpus \\ -a $annotation \\ @@ -47,8 +37,8 @@ process SUBREAD_FEATURECOUNTS { ${bams.join(' ')} cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") + "${task.process}": + subread: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") END_VERSIONS """ } diff --git a/modules/tabix/bgzip/functions.nf b/modules/tabix/bgzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries 
- paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 43726f17..ed9362b2 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIP { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) @@ -26,13 +15,14 @@ process TABIX_BGZIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.${input.getExtension()}.gz + bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/bgziptabix/functions.nf b/modules/tabix/bgziptabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/bgziptabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def 
Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index e44a7226..e419d153 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -1,39 +1,30 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_BGZIPTABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(input) output: - tuple val(meta), path("*.gz"), path("*.tbi"), emit: tbi + tuple val(meta), path("*.gz"), path("*.tbi"), emit: gz_tbi path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ - bgzip -c $options.args $input > ${prefix}.gz - tabix $options.args2 ${prefix}.gz + bgzip -c $args $input > ${prefix}.gz + tabix $args2 ${prefix}.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tabix/tabix/functions.nf b/modules/tabix/tabix/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tabix/tabix/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - 
return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index 1574c0b5..c721a554 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TABIX_TABIX { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
'bioconda::tabix=1.11' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0" - } else { - container "quay.io/biocontainers/tabix:1.11--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tabix:1.11--hdfd78af_0' : + 'quay.io/biocontainers/tabix:1.11--hdfd78af_0' }" input: tuple val(meta), path(tab) @@ -26,12 +15,13 @@ process TABIX_TABIX { path "versions.yml" , emit: versions script: + def args = task.ext.args ?: '' """ - tabix $options.args $tab + tabix $args $tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tbprofiler/profile/main.nf b/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..87175a39 --- /dev/null +++ b/modules/tbprofiler/profile/main.nf @@ -0,0 +1,38 @@ +process TBPROFILER_PROFILE { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::tb-profiler=3.0.8" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/tb-profiler:3.0.8--pypyh5e36f6f_0' : + 'quay.io/biocontainers/tb-profiler:3.0.8--pypyh5e36f6f_0' }" + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("bam/*.bam") , emit: bam + tuple val(meta), path("results/*.csv") , emit: csv, optional: true + tuple val(meta), path("results/*.json"), emit: json + tuple val(meta), path("results/*.txt") , emit: txt, optional: true + tuple val(meta), path("vcf/*.vcf.gz") , emit: vcf + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" + def input_reads = meta.single_end ? "--read1 $reads" : "--read1 ${reads[0]} --read2 ${reads[1]}" + """ + tb-profiler \\ + profile \\ + $args \\ + --prefix ${prefix} \\ + --threads $task.cpus \\ + $input_reads + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + tbprofiler: \$( echo \$(tb-profiler --version 2>&1) | sed 's/TBProfiler version //') + END_VERSIONS + """ +} diff --git a/modules/tbprofiler/profile/meta.yml b/modules/tbprofiler/profile/meta.yml new file mode 100644 index 00000000..0cac6d6b --- /dev/null +++ b/modules/tbprofiler/profile/meta.yml @@ -0,0 +1,59 @@ +name: tbprofiler_profile +description: A tool to detect resistance and lineages of M. tuberculosis genomes +keywords: + - Mycobacterium tuberculosis + - resistance + - serotype +tools: + - tbprofiler: + description: Profiling tool for Mycobacterium tuberculosis to detect drug resistance and lineage from WGS data + homepage: https://github.com/jodyphelan/TBProfiler + documentation: https://jodyphelan.gitbook.io/tb-profiler/ + tool_dev_url: https://github.com/jodyphelan/TBProfiler + doi: "10.1186/s13073-019-0650-x" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: FASTQ file + pattern: "*.{fastq.gz,fq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: BAM file with alignment details + pattern: "*.bam" + - csv: + type: file + description: Optional CSV formated result file of resistance and strain type + pattern: "*.csv" + - json: + type: file + description: JSON formated result file of resistance and strain type + pattern: "*.json" + - txt: + type: file + description: Optional text file of resistance and strain type + pattern: "*.txt" + - vcf: + type: file + description: VCF with variant info again refernce genomes + pattern: "*.vcf" + +authors: + - "@rpetit3" diff --git a/modules/tiddit/cov/functions.nf b/modules/tiddit/cov/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/cov/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// 
-// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index a3a8a171..c5a1ca0f 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_COV { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -25,25 +14,23 @@ process TIDDIT_COV { output: tuple val(meta), path("*.tab"), optional: true, emit: cov tuple val(meta), path("*.wig"), optional: true, emit: wig - path "versions.yml" , emit: versions script: - def software = getSoftwareName(task.process) - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" - + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta ? "--ref $fasta" : "" """ tiddit \\ --cov \\ -o $prefix \\ - $options.args \\ + $args \\ --bam $bam \\ $reference cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/tiddit/sv/functions.nf b/modules/tiddit/sv/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/tiddit/sv/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish 
module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index e262221a..08eecc01 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TIDDIT_SV { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::tiddit=2.12.1" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0" - } else { - container "quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' : + 'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }" input: tuple val(meta), path(bam) @@ -30,19 +19,20 @@ process TIDDIT_SV { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" """ tiddit \\ --sv \\ - $options.args \\ + $args \\ --bam $bam \\ $reference \\ -o $prefix cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/trimgalore/functions.nf b/modules/trimgalore/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/trimgalore/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def 
initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 8e77f1f7..ee40b780 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process TRIMGALORE { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::trim-galore=0.6.7' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0" - } else { - container "quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/trim-galore:0.6.7--hdfd78af_0' : + 'quay.io/biocontainers/trim-galore:0.6.7--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -30,6 +19,7 @@ process TRIMGALORE { tuple val(meta), path("*.zip") , emit: zip optional true script: + def args = task.ext.args ?: '' // Calculate number of --cores for TrimGalore based on value of task.cpus // See: https://github.com/FelixKrueger/TrimGalore/blob/master/Changelog.md#version-060-release-on-1-mar-2019 // See: https://github.com/nf-core/atacseq/pull/65 @@ -48,20 +38,20 @@ process TRIMGALORE { def tpc_r2 = params.three_prime_clip_r2 > 0 ? "--three_prime_clip_r2 ${params.three_prime_clip_r2}" : '' // Added soft-links to original fastqs for consistent naming in MultiQC - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ [ ! -f ${prefix}.fastq.gz ] && ln -s $reads ${prefix}.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --gzip \\ $c_r1 \\ $tpc_r1 \\ ${prefix}.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ @@ -70,7 +60,7 @@ process TRIMGALORE { [ ! -f ${prefix}_1.fastq.gz ] && ln -s ${reads[0]} ${prefix}_1.fastq.gz [ ! 
-f ${prefix}_2.fastq.gz ] && ln -s ${reads[1]} ${prefix}_2.fastq.gz trim_galore \\ - $options.args \\ + $args \\ --cores $cores \\ --paired \\ --gzip \\ @@ -81,8 +71,8 @@ process TRIMGALORE { ${prefix}_1.fastq.gz \\ ${prefix}_2.fastq.gz cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') + "${task.process}": + trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') cutadapt: \$(cutadapt --version) END_VERSIONS """ diff --git a/modules/ucsc/bed12tobigbed/functions.nf b/modules/ucsc/bed12tobigbed/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bed12tobigbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing 
slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bed12tobigbed/main.nf b/modules/ucsc/bed12tobigbed/main.nf index 81f39a6f..742798b3 100644 --- a/modules/ucsc/bed12tobigbed/main.nf +++ b/modules/ucsc/bed12tobigbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BED12TOBIGBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } 
conda (params.enable_conda ? "bioconda::ucsc-bedtobigbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedtobigbed:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedtobigbed:377--h446ed27_1' }" input: tuple val(meta), path(bed) @@ -29,7 +18,8 @@ process UCSC_BED12TOBIGBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedToBigBed \\ $bed \\ @@ -37,8 +27,8 @@ process UCSC_BED12TOBIGBED { ${prefix}.bigBed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedclip/functions.nf b/modules/ucsc/bedclip/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bedclip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 
?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedclip/main.nf b/modules/ucsc/bedclip/main.nf index 5fbc2b3b..dacd7260 100755 --- a/modules/ucsc/bedclip/main.nf +++ b/modules/ucsc/bedclip/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDCLIP { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bedclip=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedclip:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bedclip:377--h0b8a92a_2' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDCLIP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedClip \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDCLIP { ${prefix}.bedGraph cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bedgraphtobigwig/functions.nf b/modules/ucsc/bedgraphtobigwig/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/bedgraphtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ 
ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bedgraphtobigwig/main.nf b/modules/ucsc/bedgraphtobigwig/main.nf index f55cdb07..9ba306ab 100644 --- a/modules/ucsc/bedgraphtobigwig/main.nf +++ b/modules/ucsc/bedgraphtobigwig/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BEDGRAPHTOBIGWIG { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::ucsc-bedgraphtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1" - } else { - container "quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:377--h446ed27_1' : + 'quay.io/biocontainers/ucsc-bedgraphtobigwig:377--h446ed27_1' }" input: tuple val(meta), path(bedgraph) @@ -29,7 +18,8 @@ process UCSC_BEDGRAPHTOBIGWIG { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ bedGraphToBigWig \\ $bedgraph \\ @@ -37,8 +27,8 @@ process UCSC_BEDGRAPHTOBIGWIG { ${prefix}.bigWig cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/bigwigaverageoverbed/functions.nf b/modules/ucsc/bigwigaverageoverbed/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/bigwigaverageoverbed/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - 
options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/bigwigaverageoverbed/main.nf b/modules/ucsc/bigwigaverageoverbed/main.nf index 72491443..1e97c83d 100644 --- a/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/modules/ucsc/bigwigaverageoverbed/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_BIGWIGAVERAGEOVERBED { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-bigwigaverageoverbed=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-bigwigaverageoverbed:377--h0b8a92a_2' }" input: tuple val(meta), path(bed) @@ -29,18 +18,19 @@ process UCSC_BIGWIGAVERAGEOVERBED { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + // BUG: bigWigAverageOverBed cannot handle ensembl seqlevels style """ - # there is a bug that bigWigAverageOverBed can not handle ensembl seqlevels style. bigWigAverageOverBed \\ - $options.args \\ + $args \\ $bigwig \\ $bed \\ ${prefix}.tab cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/liftover/functions.nf b/modules/ucsc/liftover/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/ucsc/liftover/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up 
and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/liftover/main.nf b/modules/ucsc/liftover/main.nf index 3739a1e5..3dd9531e 100644 --- a/modules/ucsc/liftover/main.nf +++ b/modules/ucsc/liftover/main.nf @@ -1,24 +1,13 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_LIFTOVER { tag "$meta.id" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::ucsc-liftover=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3" - } else { - container "quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-liftover:377--h0b8a92a_3' : + 'quay.io/biocontainers/ucsc-liftover:377--h0b8a92a_3' }" input: tuple val(meta), path(bed) @@ -30,19 +19,20 @@ process UCSC_LIFTOVER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ liftOver \\ - $options.args \ + $args \ $bed \\ $chain \\ ${prefix}.lifted.bed \\ ${prefix}.unlifted.bed cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ucsc/wigtobigwig/functions.nf b/modules/ucsc/wigtobigwig/functions.nf deleted file mode 100755 index 85628ee0..00000000 --- a/modules/ucsc/wigtobigwig/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def 
path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index d03a2c4a..4c596c9a 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -1,45 +1,34 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '377' // No version information printed +def VERSION = '377' // Version information not provided by tool on CLI process UCSC_WIGTOBIGWIG { tag '$wig' label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? 
"bioconda::ucsc-wigtobigwig=377" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2" - } else { - container "quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ucsc-wigtobigwig:377--h0b8a92a_2' : + 'quay.io/biocontainers/ucsc-wigtobigwig:377--h0b8a92a_2' }" input: path wig - path chromsizes + path sizes output: - path "*.bw" , emit: bw - path "versions.yml" , emit: versions + path "*.bw" , emit: bw + path "versions.yml", emit: versions script: - + def args = task.ext.args ?: '' """ wigToBigWig \\ - $options.args \\ + $args \\ $wig \\ - $chromsizes \\ + $sizes \\ ${wig.getSimpleName()}.bw cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo "$VERSION") + "${task.process}": + ucsc: $VERSION END_VERSIONS """ } diff --git a/modules/ultra/pipeline/main.nf b/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..f2dcb543 --- /dev/null +++ b/modules/ultra/pipeline/main.nf @@ -0,0 +1,38 @@ +process ULTRA_PIPELINE { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' : + 'quay.io/biocontainers/ultra_bioinformatics:0.0.4.1--pyh5e36f6f_0' }" + + input: + tuple val(meta), path(reads) + path genome + path gtf + + output: + tuple val(meta), path("*.sam"), emit: sam + path "versions.yml" , emit: versions + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + uLTRA \\ + pipeline \\ + --t $task.cpus \\ + --prefix $prefix \\ + $args \\ + $genome \\ + $gtf \\ + $reads \\ + ./ + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ultra: \$( uLTRA --version|sed 's/uLTRA //g' ) + END_VERSIONS + """ +} diff --git a/modules/ultra/pipeline/meta.yml b/modules/ultra/pipeline/meta.yml new file mode 100644 index 00000000..fa8366e8 --- /dev/null +++ b/modules/ultra/pipeline/meta.yml @@ -0,0 +1,52 @@ +name: ultra_pipeline +description: uLTRA aligner - A wrapper around minimap2 to improve small exon detection +keywords: + - uLTRA + - minimap2 +tools: + - ultra: + description: Splice aligner of long transcriptomic reads to genome. + homepage: https://github.com/ksahlin/uLTRA + documentation: https://github.com/ksahlin/uLTRA + tool_dev_url: https://github.com/ksahlin/uLTRA + doi: "10.1093/bioinformatics/btab540" + licence: ['GNU GPLV3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: A fasta or fastq file of reads to align + pattern: "*.{fasta,fastq}" + - genome: + type: file + description: fasta file of reference genome + pattern: "*.fasta" + - gtf: + type: file + description: A annotation of use the genome + pattern: "*.gtf" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - sam: + type: file + description: The aligned reads in sam format + pattern: "*.sam" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sguizard" + - "@lassefolkersen" + - "@ksahlin" diff --git a/modules/umitools/dedup/functions.nf b/modules/umitools/dedup/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/dedup/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if 
(args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 0ec9741b..ce21437d 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_DEDUP { tag "$meta.id" label "process_medium" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(bam), path(bai) @@ -26,18 +15,19 @@ process UMITOOLS_DEDUP { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "" : "--paired" """ umi_tools dedup \\ -I $bam \\ -S ${prefix}.bam \\ $paired \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/umitools/extract/functions.nf b/modules/umitools/extract/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/umitools/extract/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of 
available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/umitools/extract/main.nf b/modules/umitools/extract/main.nf index d90a3ba8..fba8f054 100644 --- a/modules/umitools/extract/main.nf +++ b/modules/umitools/extract/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UMITOOLS_EXTRACT { tag "$meta.id" label "process_low" - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::umi_tools=1.1.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0" - } else { - container "quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/umi_tools:1.1.2--py38h4a8c8d9_0' : + 'quay.io/biocontainers/umi_tools:1.1.2--py38h4a8c8d9_0' }" input: tuple val(meta), path(reads) @@ -27,19 +16,20 @@ process UMITOOLS_EXTRACT { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ umi_tools \\ extract \\ -I $reads \\ -S ${prefix}.umi_extract.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } else { @@ -50,12 +40,12 @@ process UMITOOLS_EXTRACT { --read2-in=${reads[1]} \\ -S ${prefix}.umi_extract_1.fastq.gz \\ --read2-out=${prefix}.umi_extract_2.fastq.gz \\ - $options.args \\ + $args \\ > ${prefix}.umi_extract.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') + "${task.process}": + umitools: \$(umi_tools --version 2>&1 | sed 's/^.*UMI-tools version://; s/ *\$//') END_VERSIONS """ } diff --git a/modules/unicycler/functions.nf b/modules/unicycler/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unicycler/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: 
'' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unicycler/main.nf b/modules/unicycler/main.nf index 2f7c49d6..1ccc72a9 100644 --- a/modules/unicycler/main.nf +++ b/modules/unicycler/main.nf @@ -1,49 +1,43 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNICYCLER { tag "$meta.id" label 'process_high' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 'bioconda::unicycler=0.4.8' : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3" - } else { - container "quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/unicycler:0.4.8--py38h8162308_3' : + 'quay.io/biocontainers/unicycler:0.4.8--py38h8162308_3' }" input: - tuple val(meta), path(reads) + tuple val(meta), path(shortreads), path(longreads) output: - tuple val(meta), path('*.scaffolds.fa'), emit: scaffolds - tuple val(meta), path('*.assembly.gfa'), emit: gfa - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.scaffolds.fa.gz'), emit: scaffolds + tuple val(meta), path('*.assembly.gfa.gz'), emit: gfa + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def input_reads = meta.single_end ? "-s $reads" : "-1 ${reads[0]} -2 ${reads[1]}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def short_reads = shortreads ? ( meta.single_end ? "-s $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}" ) : "" + def long_reads = longreads ? "-l $longreads" : "" """ unicycler \\ --threads $task.cpus \\ - $options.args \\ - $input_reads \\ + $args \\ + $short_reads \\ + $long_reads \\ --out ./ mv assembly.fasta ${prefix}.scaffolds.fa + gzip -n ${prefix}.scaffolds.fa mv assembly.gfa ${prefix}.assembly.gfa + gzip -n ${prefix}.assembly.gfa mv unicycler.log ${prefix}.unicycler.log cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') + "${task.process}": + unicycler: \$(echo \$(unicycler --version 2>&1) | sed 's/^.*Unicycler v//; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/unicycler/meta.yml b/modules/unicycler/meta.yml index e3b1aab9..b04ac882 100644 --- a/modules/unicycler/meta.yml +++ b/modules/unicycler/meta.yml @@ -19,11 +19,15 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - reads: + - shortreads: type: file description: | - List of input FastQ files of size 1 and 2 for single-end and paired-end data, + List of input Illumina FastQ files of size 1 and 2 for single-end and paired-end data, respectively. + - longreads: + type: file + description: | + List of input FastQ files of size 1, PacBio or Nanopore long reads. output: - meta: type: map @@ -37,11 +41,11 @@ output: - scaffolds: type: file description: Fasta file containing scaffolds - pattern: "*.{scaffolds.fa}" + pattern: "*.{scaffolds.fa.gz}" - gfa: type: file description: gfa file containing assembly - pattern: "*.{assembly.gfa}" + pattern: "*.{assembly.gfa.gz}" - log: type: file description: unicycler log file @@ -53,3 +57,4 @@ output: authors: - "@JoseEspinosa" - "@drpatelh" + - "@d4straub" diff --git a/modules/untar/functions.nf b/modules/untar/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/untar/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def 
getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/untar/main.nf b/modules/untar/main.nf index efb9d825..6d1996e7 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNTAR { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "conda-forge::sed=4.7" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img" - } else { - container "biocontainers/biocontainers:v1.2.0_cv1" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : + 'biocontainers/biocontainers:v1.2.0_cv1' }" input: path archive @@ -26,16 +15,19 @@ process UNTAR { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' untar = archive.toString() - '.tar.gz' """ tar \\ -xzvf \\ - $options.args \\ - $archive + $args \\ + $archive \\ + $args2 \\ cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') END_VERSIONS """ } diff --git a/modules/unzip/functions.nf b/modules/unzip/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/unzip/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> 
!item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index f39e75e8..294ac0b0 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -1,23 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process UNZIP { tag "$archive" label 'process_low' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) } - conda 
(params.enable_conda ? "bioconda::p7zip=15.09" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4" - } else { - container "quay.io/biocontainers/p7zip:15.09--h2d50403_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/p7zip:15.09--h2d50403_4' : + 'quay.io/biocontainers/p7zip:15.09--h2d50403_4' }" input: path archive @@ -27,17 +15,17 @@ process UNZIP { path "versions.yml" , emit: versions script: - + def args = task.ext.args ?: '' if ( archive instanceof List && archive.name.size > 1 ) { exit 1, "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } """ 7za \\ e \\ -o"${archive.baseName}"/ \\ - $options.args \\ + $args \\ $archive cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: + "${task.process}": 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') END_VERSIONS """ diff --git a/modules/variantbam/functions.nf b/modules/variantbam/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/variantbam/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - 
options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/variantbam/main.nf b/modules/variantbam/main.nf index e73b8bf1..11059a9a 100644 --- a/modules/variantbam/main.nf +++ b/modules/variantbam/main.nf @@ -1,43 +1,33 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - -def VERSION = '1.4.4a' +def VERSION = '1.4.4a' // Version information not provided by tool on CLI process VARIANTBAM { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? "bioconda::variantbam=1.4.4a" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5" - } else { - container "quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/variantbam:1.4.4a--h7d7f7ad_5' : + 'quay.io/biocontainers/variantbam:1.4.4a--h7d7f7ad_5' }" input: tuple val(meta), path(bam) output: - tuple val(meta), path("*.bam") , emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions script: - def prefix = options.suffix ? 
"${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ variant \\ $bam \\ -o ${prefix}.bam \\ - $options.args + $args cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo $VERSION) + "${task.process}": + variantbam: $VERSION END_VERSIONS """ } diff --git a/modules/vcftools/functions.nf b/modules/vcftools/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/vcftools/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do 
not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/vcftools/main.nf b/modules/vcftools/main.nf index 768d5a23..fbe646ca 100644 --- a/modules/vcftools/main.nf +++ b/modules/vcftools/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process VCFTOOLS { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::vcftools=0.1.16" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4" - } else { - container "quay.io/biocontainers/vcftools:0.1.16--he513fc3_4" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/vcftools:0.1.16--he513fc3_4' : + 'quay.io/biocontainers/vcftools:0.1.16--he513fc3_4' }" input: // Owing to the nature of vcftools we here provide solutions to working with optional bed files and optional @@ -93,22 +82,23 @@ process VCFTOOLS { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" - def args = options.args.tokenize() + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def args_list = args.tokenize() - def bed_arg = (options.args.contains('--bed')) ? "--bed ${bed}" : - (options.args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : - (options.args.contains('--hapcount')) ? "--hapcount ${bed}" : '' - args.removeIf { it.contains('--bed') } - args.removeIf { it.contains('--exclude-bed') } - args.removeIf { it.contains('--hapcount') } + def bed_arg = (args.contains('--bed')) ? "--bed ${bed}" : + (args.contains('--exclude-bed')) ? "--exclude-bed ${bed}" : + (args.contains('--hapcount')) ? "--hapcount ${bed}" : '' + args_list.removeIf { it.contains('--bed') } + args_list.removeIf { it.contains('--exclude-bed') } + args_list.removeIf { it.contains('--hapcount') } - def diff_variant_arg = (options.args.contains('--diff')) ? "--diff ${diff_variant_file}" : - (options.args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : - (options.args.contains('--diff-bcf')) ? 
"--diff-bcf ${diff_variant_file}" : '' - args.removeIf { it.contains('--diff') } - args.removeIf { it.contains('--gzdiff') } - args.removeIf { it.contains('--diff-bcf') } + def diff_variant_arg = (args.contains('--diff')) ? "--diff ${diff_variant_file}" : + (args.contains('--gzdiff')) ? "--gzdiff ${diff_variant_file}" : + (args.contains('--diff-bcf')) ? "--diff-bcf ${diff_variant_file}" : '' + args_list.removeIf { it.contains('--diff') } + args_list.removeIf { it.contains('--gzdiff') } + args_list.removeIf { it.contains('--diff-bcf') } def input_file = ("$variant_file".endsWith(".vcf")) ? "--vcf ${variant_file}" : ("$variant_file".endsWith(".vcf.gz")) ? "--gzvcf ${variant_file}" : @@ -118,13 +108,13 @@ process VCFTOOLS { vcftools \\ $input_file \\ --out $prefix \\ - ${args.join(' ')} \\ + ${args_list.join(' ')} \\ $bed_arg \\ $diff_variant_arg cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') + "${task.process}": + vcftools: \$(echo \$(vcftools --version 2>&1) | sed 's/^.*VCFtools (//;s/).*//') END_VERSIONS """ } diff --git a/modules/yara/index/functions.nf b/modules/yara/index/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/index/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = 
args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? 
path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/index/main.nf b/modules/yara/index/main.nf index 51ae8a32..77122c78 100644 --- a/modules/yara/index/main.nf +++ b/modules/yara/index/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_INDEX { tag "$fasta" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:'index', meta:[:], publish_by_meta:[]) } conda (params.enable_conda ? "bioconda::yara=1.0.2" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/yara:1.0.2--2" - } else { - container "quay.io/biocontainers/yara:1.0.2--2" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/yara:1.0.2--2' : + 'quay.io/biocontainers/yara:1.0.2--2' }" input: path fasta @@ -26,6 +15,7 @@ process YARA_INDEX { path "versions.yml", emit: versions script: + def args = task.ext.args ?: '' """ mkdir yara @@ -38,8 +28,8 @@ process YARA_INDEX { cp $fasta yara/yara.fasta cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_indexer --version 2>&1) | sed 's/^.*yara_indexer version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/modules/yara/mapper/functions.nf b/modules/yara/mapper/functions.nf deleted file mode 100644 index 85628ee0..00000000 --- a/modules/yara/mapper/functions.nf +++ /dev/null @@ -1,78 +0,0 @@ -// -// Utility functions used in nf-core DSL2 module files -// - -// -// Extract name of software tool from process name using $task.process -// -def getSoftwareName(task_process) { - return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase() -} - -// -// Extract name of module from process name using $task.process -// -def getProcessName(task_process) { - return task_process.tokenize(':')[-1] -} - -// -// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules -// -def initOptions(Map args) { - def Map options = [:] - options.args = args.args ?: '' - options.args2 = args.args2 ?: '' - options.args3 = args.args3 ?: '' - options.publish_by_meta = args.publish_by_meta ?: [] - options.publish_dir = args.publish_dir ?: '' - options.publish_files = args.publish_files - options.suffix = args.suffix ?: '' - return options -} - -// -// Tidy up and join elements of a list to return a path string -// -def getPathFromList(path_list) { - def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries - paths = paths.collect { 
it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes - return paths.join('/') -} - -// -// Function to save/publish module results -// -def saveFiles(Map args) { - def ioptions = initOptions(args.options) - def path_list = [ ioptions.publish_dir ?: args.publish_dir ] - - // Do not publish versions.yml unless running from pytest workflow - if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) { - return null - } - if (ioptions.publish_by_meta) { - def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta - for (key in key_list) { - if (args.meta && key instanceof String) { - def path = key - if (args.meta.containsKey(key)) { - path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key] - } - path = path instanceof String ? path : '' - path_list.add(path) - } - } - } - if (ioptions.publish_files instanceof Map) { - for (ext in ioptions.publish_files) { - if (args.filename.endsWith(ext.key)) { - def ext_list = path_list.collect() - ext_list.add(ext.value) - return "${getPathFromList(ext_list)}/$args.filename" - } - } - } else if (ioptions.publish_files == null) { - return "${getPathFromList(path_list)}/$args.filename" - } -} diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 3d69674c..4539033d 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -1,22 +1,11 @@ -// Import generic module functions -include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions' - -params.options = [:] -options = initOptions(params.options) - process YARA_MAPPER { tag "$meta.id" label 'process_medium' - publishDir "${params.outdir}", - mode: params.publish_dir_mode, - saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) } conda (params.enable_conda ? 
"bioconda::yara=1.0.2 bioconda::samtools=1.12" : null) - if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { - container "https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } else { - container "quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0" - } + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' : + 'quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' }" input: tuple val(meta), path(reads) @@ -27,27 +16,28 @@ process YARA_MAPPER { path "versions.yml" , emit: versions script: - def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ yara_mapper \\ - $options.args \\ + $args \\ -t $task.cpus \\ -f bam \\ ${index}/yara \\ $reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam cat <<-END_VERSIONS > versions.yml - ${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } else { """ yara_mapper \\ - $options.args \\ - -t ${task.cpus} \\ + $args \\ + -t $task.cpus \\ -f bam \\ ${index}/yara \\ ${reads[0]} \\ @@ -57,8 +47,8 @@ process YARA_MAPPER { samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam cat <<-END_VERSIONS > versions.yml - 
${getProcessName(task.process)}: - ${getSoftwareName(task.process)}: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') + "${task.process}": + yara: \$(echo \$(yara_mapper --version 2>&1) | sed 's/^.*yara_mapper version: //; s/ .*\$//') samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ diff --git a/subworkflows/nf-core/annotation_ensemblvep/main.nf b/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..3f3ecc6e --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,26 @@ +// +// Run VEP to annotate VCF files +// + +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_ENSEMBLVEP { + take: + vcf // channel: [ val(meta), vcf ] + vep_genome // value: which genome + vep_species // value: which species + vep_cache_version // value: which cache version + vep_cache // path: path_to_vep_cache (optionnal) + + main: + ENSEMBLVEP(vcf, vep_genome, vep_species, vep_cache_version, vep_cache) + ANNOTATION_BGZIPTABIX(ENSEMBLVEP.out.vcf) + + ch_versions = ENSEMBLVEP.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = ENSEMBLVEP.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_ensemblvep/meta.yml b/subworkflows/nf-core/annotation_ensemblvep/meta.yml new file mode 100644 index 00000000..e7d92ce9 --- /dev/null +++ b/subworkflows/nf-core/annotation_ensemblvep/meta.yml @@ -0,0 +1,29 @@ +name: annotation_ensemblvep +description: | + Perform annotation with ensemblvep and bgzip + tabix index the resulting VCF file +keywords: + - ensemblvep +modules: + - ensemblvep + - tabix/bgziptabix +input: + - meta: + 
type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/subworkflows/nf-core/annotation_snpeff/main.nf b/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..add5f9c8 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,23 @@ +// +// Run SNPEFF to annotate VCF files +// + +include { SNPEFF } from '../../../modules/snpeff/main' +include { TABIX_BGZIPTABIX as ANNOTATION_BGZIPTABIX } from '../../../modules/tabix/bgziptabix/main' + +workflow ANNOTATION_SNPEFF { + take: + vcf // channel: [ val(meta), vcf ] + snpeff_db // value: version of db to use + snpeff_cache // path: path_to_snpeff_cache (optionnal) + + main: + SNPEFF(vcf, snpeff_db, snpeff_cache) + ANNOTATION_BGZIPTABIX(SNPEFF.out.vcf) + ch_versions = SNPEFF.out.versions.first().mix(ANNOTATION_BGZIPTABIX.out.versions.first()) + + emit: + vcf_tbi = ANNOTATION_BGZIPTABIX.out.gz_tbi // channel: [ val(meta), vcf.gz, vcf.gz.tbi ] + reports = SNPEFF.out.report // path: *.html + versions = ch_versions // path: versions.yml +} diff --git a/subworkflows/nf-core/annotation_snpeff/meta.yml b/subworkflows/nf-core/annotation_snpeff/meta.yml new file mode 100644 index 00000000..164a0ee2 --- /dev/null +++ b/subworkflows/nf-core/annotation_snpeff/meta.yml @@ -0,0 +1,29 @@ +name: annotation_snpeff +description: | + Perform annotation with snpeff and bgzip + tabix index the resulting VCF file +keywords: + - snpeff +modules: + - snpeff + - tabix/bgziptabix +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: vcf + description: list containing one vcf file + pattern: "[ *.{vcf,vcf.gz} ]" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - vcf_tbi: + type: file + description: Compressed vcf file + tabix index + pattern: "[ *{.vcf.gz,vcf.gz.tbi} ]" +authors: + - '@maxulysse' diff --git a/subworkflows/nf-core/bam_sort_samtools/nextflow.config b/subworkflows/nf-core/bam_sort_samtools/nextflow.config index 2fd55747..72128aad 100644 --- a/subworkflows/nf-core/bam_sort_samtools/nextflow.config +++ b/subworkflows/nf-core/bam_sort_samtools/nextflow.config @@ -1 +1,3 @@ -params.options = [:] +params.sort_options = [:] +params.index_options = [:] +params.stats_options = [:] diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..042d0bbd --- /dev/null +++ b/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,86 @@ +// +// Runs FGBIO tools to remove UMI tags from FASTQ reads +// Convert them to unmapped BAM file, map them to the reference genome, +// use the mapped information to group UMIs and generate consensus reads +// + + +include { BWAMEM2_INDEX } from '../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../modules/bwamem2/mem/main' +include { BWA_INDEX as BWAMEM1_INDEX } from '../../../modules/bwa/index/main.nf' +include { BWA_MEM as BWAMEM1_MEM } from '../../../modules/bwa/mem/main' +include { FGBIO_CALLMOLECULARCONSENSUSREADS as CALLUMICONSENSUS } from '../../../modules/fgbio/callmolecularconsensusreads/main.nf' +include { FGBIO_FASTQTOBAM as FASTQTOBAM } from '../../../modules/fgbio/fastqtobam/main' +include { FGBIO_GROUPREADSBYUMI as GROUPREADSBYUMI } from '../../../modules/fgbio/groupreadsbyumi/main' +include { SAMBLASTER } from '../../../modules/samblaster/main' +include { SAMTOOLS_BAM2FQ as BAM2FASTQ } from 
'../../../modules/samtools/bam2fq/main.nf' + + +workflow CREATE_UMI_CONSENSUS { + take: + reads // channel: [mandatory] [ val(meta), [ reads ] ] + fasta // channel: [mandatory] /path/to/reference/fasta + read_structure // string: [mandatory] "read_structure" + groupreadsbyumi_strategy // string: [mandatory] grouping strategy - default: "Adjacency" + aligner // string: [mandatory] "bwa-mem" or "bwa-mem2" + + main: + ch_versions = Channel.empty() + + // using information in val(read_structure) FASTQ reads are converted into + // a tagged unmapped BAM file (uBAM) + FASTQTOBAM ( reads, read_structure ) + ch_versions = ch_versions.mix(FASTQTOBAM.out.version) + + // in order to map uBAM using BWA MEM, we need to convert uBAM to FASTQ + // but keep the appropriate UMI tags in the FASTQ comment field and produce + // an interleaved FASQT file (hence, split = false) + split = false + BAM2FASTQ ( FASTQTOBAM.out.umibam, split ) + ch_versions = ch_versions.mix(BAM2FASTQ.out.versions) + + // the user can choose here to use either bwa-mem (default) or bwa-mem2 + aligned_bam = Channel.empty() + + if (aligner == "bwa-mem") { + // reference is indexed + BWAMEM1_INDEX ( fasta ) + ch_versions = ch_versions.mix(BWAMEM1_INDEX.out.versions) + + // appropriately tagged interleaved FASTQ reads are mapped to the reference + BWAMEM1_MEM ( BAM2FASTQ.out.reads, BWAMEM1_INDEX.out.index, false ) + ch_versions = ch_versions.mix(BWAMEM1_MEM.out.versions) + aligned_bam = BWAMEM1_MEM.out.bam + } else { + // reference is indexed + BWAMEM2_INDEX ( fasta ) + ch_versions = ch_versions.mix(BWAMEM2_INDEX.out.versions) + + // appropriately tagged interleaved FASTQ reads are mapped to the reference + BWAMEM2_MEM ( BAM2FASTQ.out.reads, BWAMEM2_INDEX.out.index, false ) + ch_versions = ch_versions.mix(BWAMEM2_MEM.out.versions) + aligned_bam = BWAMEM2_MEM.out.bam + } + + // samblaster is used in order to tag mates information in the BAM file + // this is used in order to group reads by UMI + SAMBLASTER ( 
aligned_bam ) + ch_versions = ch_versions.mix(SAMBLASTER.out.versions) + + // appropriately tagged reads are now grouped by UMI information + GROUPREADSBYUMI ( SAMBLASTER.out.bam, groupreadsbyumi_strategy ) + ch_versions = ch_versions.mix(GROUPREADSBYUMI.out.versions) + + // using the above created groups, a consensus across reads in the same grou + // can be called + // this will emit a consensus BAM file + CALLUMICONSENSUS ( GROUPREADSBYUMI.out.bam ) + ch_versions = ch_versions.mix(CALLUMICONSENSUS.out.versions) + + emit: + ubam = FASTQTOBAM.out.umibam // channel: [ val(meta), [ bam ] ] + groupbam = GROUPREADSBYUMI.out.bam // channel: [ val(meta), [ bam ] ] + consensusbam = CALLUMICONSENSUS.out.bam // channel: [ val(meta), [ bam ] ] + versions = ch_versions // channel: [ versions.yml ] +} + diff --git a/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml new file mode 100644 index 00000000..2cb61206 --- /dev/null +++ b/subworkflows/nf-core/fgbio_create_umi_consensus/meta.yml @@ -0,0 +1,67 @@ +name: fgbio_create_umi_consensus +description: | + This workflow uses the suite FGBIO to identify and remove UMI tags from FASTQ reads + convert them to unmapped BAM file, map them to the reference genome, + and finally use the mapped information to group UMIs and generate consensus reads in each group +keywords: + - fgbio + - umi + - samblaster + - samtools + - bwa +modules: + - bwa/index + - bwa/mem + - fgbio/fastqtobam + - fgbio/groupreadsbyumi + - fgbio/callmolecularconsensusreads + - samblaster + - samtools/bam2fq +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - reads: + type: list + description: list umi-tagged reads + pattern: "[ *.{fastq.gz/fq.gz} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - read_structure: + type: string + description: | + A read structure should always be provided for each of the fastq files. + If single end, the string will contain only one structure (i.e. "2M11S+T"), if paired-end the string + will contain two structures separated by a blank space (i.e. "2M11S+T 2M11S+T"). + If the read does not contain any UMI, the structure will be +T (i.e. only template of any length). + https://github.com/fulcrumgenomics/fgbio/wiki/Read-Structures + - groupreadsbyumi_strategy: + type: string + description: | + Reguired argument: defines the UMI assignment strategy. + Must be chosen among: Identity, Edit, Adjacency, Paired. +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - ubam: + type: file + description: unmapped bam file + pattern: '*.bam' + - groupbam: + type: file + description: mapped bam file, where reads are grouped by UMI tag + pattern: '*.bam' + - consensusbam: + type: file + description: | + mapped bam file, where reads are created as consensus of those + belonging to the same UMI group + pattern: '*.bam' +authors: + - '@lescai' diff --git a/subworkflows/nf-core/gatk_create_som_pon/main.nf b/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..89a9566e --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,57 @@ +// +// Run GATK mutect2, genomicsdbimport and createsomaticpanelofnormals +// +params.mutect2_options = [args: '--max-mnp-distance 0'] +params.gendbimport_options = [:] +params.createsompon_options = [:] + +include { GATK4_MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GENOMICSDBIMPORT } from '../../../modules/gatk4/genomicsdbimport/main' 
addParams( options: params.gendbimport_options ) +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../modules/gatk4/createsomaticpanelofnormals/main' addParams( options: params.createsompon_options ) + +workflow GATK_CREATE_SOM_PON { + take: + ch_mutect2_in // channel: [ val(meta), [ input ], [ input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + pon_name // channel: name for panel of normals + interval_file // channel: /path/to/interval/file + + main: + ch_versions = Channel.empty() + input = channel.from(ch_mutect2_in) + // + //Perform variant calling for each sample using mutect2 module in panel of normals mode. + // + GATK4_MUTECT2 ( input, false, true, false, [], fasta, fai, dict, [], [], [], [] ) + ch_versions = ch_versions.mix(GATK4_MUTECT2.out.versions.first()) + + // + //Convert all sample vcfs into a genomicsdb workspace using genomicsdbimport. + // + ch_vcf = GATK4_MUTECT2.out.vcf.collect{it[1]}.toList() + ch_index = GATK4_MUTECT2.out.tbi.collect{it[1]}.toList() + gendb_input = Channel.of([[ id:pon_name ]]).combine(ch_vcf).combine(ch_index).combine([interval_file]).combine(['']).combine([dict]) + GATK4_GENOMICSDBIMPORT ( gendb_input, false, false, false ) + ch_versions = ch_versions.mix(GATK4_GENOMICSDBIMPORT.out.versions.first()) + + // + //Panel of normals made from genomicsdb workspace using createsomaticpanelofnormals. 
+ // + GATK4_GENOMICSDBIMPORT.out.genomicsdb.view() + GATK4_CREATESOMATICPANELOFNORMALS ( GATK4_GENOMICSDBIMPORT.out.genomicsdb, fasta, fai, dict ) + ch_versions = ch_versions.mix(GATK4_CREATESOMATICPANELOFNORMALS.out.versions.first()) + + emit: + mutect2_vcf = GATK4_MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = GATK4_MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = GATK4_MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + genomicsdb = GATK4_GENOMICSDBIMPORT.out.genomicsdb // channel: [ val(meta), [ genomicsdb ] ] + + pon_vcf = GATK4_CREATESOMATICPANELOFNORMALS.out.vcf // channel: [ val(meta), [ vcf.gz ] ] + pon_index = GATK4_CREATESOMATICPANELOFNORMALS.out.tbi // channel: [ val(meta), [ tbi ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_create_som_pon/meta.yml b/subworkflows/nf-core/gatk_create_som_pon/meta.yml new file mode 100644 index 00000000..07404aae --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/meta.yml @@ -0,0 +1,75 @@ +name: gatk_create_som_pon +description: Perform variant calling on a set of normal samples using mutect2 panel of normals mode. Group them into a genomicsdbworkspace using genomicsdbimport, then use this to create a panel of normals using createsomaticpanelofnormals. +keywords: + - gatk4 + - mutect2 + - genomicsdbimport + - createsomaticpanelofnormals + - variant_calling + - genomicsdb_workspace + - panel_of_normals +modules: + - gatk4/mutect2 + - gatk4/genomicsdbimport + - gatk4/createsomaticpanelofnormals +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: list + description: list of BAM files, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list of BAM file indexes, also able to take CRAM indexes as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - pon_name: + type: string + description: name to be used for the genomicsdb workspace and panel of normals as meta_id has the individual sample names and a name for the combined files is required here. + pattern: "example_name" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: list + description: List of compressed vcf files to be used to make the gendb workspace + pattern: "[ *.vcf.gz ]" + - mutect2_index: + type: list + description: List of indexes of mutect2_vcf files + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: list + description: List of stats files that pair with mutect2_vcf files + pattern: "[ *vcf.gz.stats ]" + - genomicsdb: + type: directory + description: Directory containing the files that compose the genomicsdb workspace.
+ pattern: "path/name_of_workspace" + - pon_vcf: + type: file + description: Panel of normal as compressed vcf file + pattern: "*.vcf.gz" + - pon_index: + type: file + description: Index of pon_vcf file + pattern: "*vcf.gz.tbi" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_create_som_pon/nextflow.config b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config new file mode 100644 index 00000000..6f560c9e --- /dev/null +++ b/subworkflows/nf-core/gatk_create_som_pon/nextflow.config @@ -0,0 +1,3 @@ +params.mutect2_options = [:] +params.gendbimport_options = [:] +params.createsompon_options = [:] diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf new file mode 100644 index 00000000..25c63687 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/main.nf @@ -0,0 +1,109 @@ +// +// Run GATK mutect2 in tumor normal mode, getepileupsummaries, calculatecontamination, learnreadorientationmodel and filtermutectcalls +// + +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [suffix: '_tumor'] +params.getpileup_normal_options = [suffix: '_normal'] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_LEARNREADORIENTATIONMODEL as LEARNREADORIENTATIONMODEL } from '../../../modules/gatk4/learnreadorientationmodel/main' addParams( options: params.learnorientation_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_TUMOR } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_tumor_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES_NORMAL} from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: 
params.getpileup_normal_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_NORMAL_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [which_norm] ] + fasta // channel: /path/to/reference/fasta + fai // channel: /path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + + // + //Perform variant calling using mutect2 module in tumor normal mode. + // + mutect2_input = channel.from(input) + MUTECT2 ( mutect2_input, false, false, false, [], fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate artifactpriors using learnreadorientationmodel on the f1r2 output of mutect2. + // + ch_learnread_in = MUTECT2.out.f1r2.collect() + LEARNREADORIENTATIONMODEL (ch_learnread_in) + ch_versions = ch_versions.mix(LEARNREADORIENTATIONMODEL.out.versions) + + // + //Generate pileup summary tables using getpileupsummaries. Tumor sample should always be passed in as the first input and input list entries of ch_mutect2_in, + //to ensure correct file order for calculatecontamination.
+ // + pileup_tumor_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + + pileup_normal_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[1], input_index[1]] + } + GETPILEUPSUMMARIES_TUMOR ( pileup_tumor_input, germline_resource, germline_resource_tbi, interval_file ) + GETPILEUPSUMMARIES_NORMAL ( pileup_normal_input, germline_resource, germline_resource_tbi, interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES_NORMAL.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() + ch_pileup_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() + ch_calccon_in = ch_pileup_tumor.combine(ch_pileup_normal, by: 0) + CALCULATECONTAMINATION ( ch_calccon_in, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the artifactpriors, contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + ch_orientation = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is used as a placeholder for optional input to specify the contamination estimate as a value, since the contamination table is used, this is not needed. 
+ ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_orientation, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_tbi = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + mutect2_f1r2 = MUTECT2.out.f1r2.collect() // channel: [ val(meta), [ f1r2 ] ] + + artifact_priors = LEARNREADORIENTATIONMODEL.out.artifactprior.collect() // channel: [ val(meta), [ artifactprior ] ] + + pileup_table_tumor = GETPILEUPSUMMARIES_TUMOR.out.table.collect() // channel: [ val(meta), [ table_tumor ] ] + pileup_table_normal = GETPILEUPSUMMARIES_NORMAL.out.table.collect() // channel: [ val(meta), [ table_normal ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_tbi = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..4c42addf --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/meta.yml @@ -0,0 +1,127 @@ +name: gatk_tumor_normal_somatic_variant_calling +description: | + Perform variant calling on a paired tumor normal 
set of samples using mutect2 tumor normal mode. + f1r2 output of mutect2 is run through learnreadorientationmodel to get the artifact priors. + Run the input bam files through getpileupsummarries and then calculatecontamination to get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls, artifact priors and the contamination & segmentation tables for additional filtering. +keywords: + - gatk4 + - mutect2 + - learnreadorientationmodel + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_only + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/learnreadorientationmodel + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing the tumor and normal BAM files, in that order, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing the tumor and normal BAM file indexes, in that order, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - which_norm: + type: list + description: optional list of sample headers contained in the normal sample input file. + pattern: "testN" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. 
+ pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling. + pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - mutect2_f1r2: + type: file + description: file containing information to be passed to LearnReadOrientationModel. + pattern: "*.f1r2.tar.gz" + - artifact_priors: + type: file + description: file containing artifact-priors to be used by filtermutectcalls. + pattern: "*.tar.gz" + - pileup_table_tumor: + type: file + description: File containing the tumor pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_tumor.pileups.table" + - pileup_table_normal: + type: file + description: File containing the normal pileup summary table, kept separate as calculatecontamination needs them individually specified. + pattern: "*_normal.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. 
+ pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..bb8d1bc4 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/nextflow.config @@ -0,0 +1,6 @@ +params.mutect2_options = [:] +params.learnorientation_options = [:] +params.getpileup_tumor_options = [:] +params.getpileup_normal_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf new file mode 100644 index 00000000..20d8a176 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/main.nf @@ -0,0 +1,88 @@ +// +// Run GATK mutect2 in tumor only mode, getepileupsummaries, calculatecontamination and filtermutectcalls +// + +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [suffix: '_filtered'] + +include { GATK4_MUTECT2 as MUTECT2 } from '../../../modules/gatk4/mutect2/main' addParams( options: params.mutect2_options ) +include { GATK4_GETPILEUPSUMMARIES as GETPILEUPSUMMARIES } from '../../../modules/gatk4/getpileupsummaries/main' addParams( options: params.getpileup_options ) +include { GATK4_CALCULATECONTAMINATION as CALCULATECONTAMINATION } from '../../../modules/gatk4/calculatecontamination/main' addParams( options: params.calccontam_options ) +include { GATK4_FILTERMUTECTCALLS as FILTERMUTECTCALLS } from '../../../modules/gatk4/filtermutectcalls/main' addParams( options: params.filtercalls_options ) + +workflow GATK_TUMOR_ONLY_SOMATIC_VARIANT_CALLING { + take: + input // channel: [ val(meta), [ input ], [ input_index ], [] ] + fasta // channel: /path/to/reference/fasta + fai // channel: 
/path/to/reference/fasta/index + dict // channel: /path/to/reference/fasta/dictionary + germline_resource // channel: /path/to/germline/resource + germline_resource_tbi // channel: /path/to/germline/index + panel_of_normals // channel: /path/to/panel/of/normals + panel_of_normals_tbi // channel: /path/to/panel/of/normals/index + interval_file // channel: /path/to/interval/file + + + main: + ch_versions = Channel.empty() + mutect2_input = channel.from(input) + + // + //Perform variant calling using mutect2 module in tumor single mode. + // + MUTECT2 ( mutect2_input , true , false , false , [] , fasta , fai , dict , germline_resource , germline_resource_tbi , panel_of_normals , panel_of_normals_tbi ) + ch_versions = ch_versions.mix(MUTECT2.out.versions) + + // + //Generate pileup summary table using getpileupsummaries. + // + pileup_input = channel.from(input).map { + meta, input_file, input_index, which_norm -> + [meta, input_file[0], input_index[0]] + } + GETPILEUPSUMMARIES ( pileup_input , germline_resource , germline_resource_tbi , interval_file ) + ch_versions = ch_versions.mix(GETPILEUPSUMMARIES.out.versions) + + // + //Contamination and segmentation tables created using calculatecontamination on the pileup summary table. + // + ch_pileup = GETPILEUPSUMMARIES.out.table.collect() + //[] is a placeholder for the optional input where the matched normal sample would be passed in for tumor-normal samples, which is not necessary for this workflow. + ch_pileup.add([]) + CALCULATECONTAMINATION ( ch_pileup, true ) + ch_versions = ch_versions.mix(CALCULATECONTAMINATION.out.versions) + + // + //Mutect2 calls filtered by filtermutectcalls using the contamination and segmentation tables. + // + ch_vcf = MUTECT2.out.vcf.collect() + ch_tbi = MUTECT2.out.tbi.collect() + ch_stats = MUTECT2.out.stats.collect() + //[] is added as a placeholder for the optional input file artifact priors, which is only used for tumor-normal samples and therefore isn't needed in this workflow.
+ ch_stats.add([]) + ch_segment = CALCULATECONTAMINATION.out.segmentation.collect() + ch_contamination = CALCULATECONTAMINATION.out.contamination.collect() + //[] is added as a placeholder for entering a contamination estimate value, which is not needed as this workflow uses the contamination table instead. + ch_contamination.add([]) + ch_filtermutect_in = ch_vcf.combine(ch_tbi, by: 0).combine(ch_stats, by: 0).combine(ch_segment, by: 0).combine(ch_contamination, by: 0) + FILTERMUTECTCALLS ( ch_filtermutect_in, fasta, fai, dict ) + ch_versions = ch_versions.mix(FILTERMUTECTCALLS.out.versions) + + emit: + mutect2_vcf = MUTECT2.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + mutect2_index = MUTECT2.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + mutect2_stats = MUTECT2.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + pileup_table = GETPILEUPSUMMARIES.out.table.collect() // channel: [ val(meta), [ table ] ] + + contamination_table = CALCULATECONTAMINATION.out.contamination.collect() // channel: [ val(meta), [ contamination ] ] + segmentation_table = CALCULATECONTAMINATION.out.segmentation.collect() // channel: [ val(meta), [ segmentation ] ] + + filtered_vcf = FILTERMUTECTCALLS.out.vcf.collect() // channel: [ val(meta), [ vcf ] ] + filtered_index = FILTERMUTECTCALLS.out.tbi.collect() // channel: [ val(meta), [ tbi ] ] + filtered_stats = FILTERMUTECTCALLS.out.stats.collect() // channel: [ val(meta), [ stats ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml new file mode 100644 index 00000000..14329691 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/meta.yml @@ -0,0 +1,108 @@ +name: gatk_tumor_only_somatic_variant_calling +description: | + Perform variant calling on a single tumor sample using mutect2 tumor only mode. 
+ Run the input bam file through getpileupsummaries and then calculatecontamination to get the contamination and segmentation tables. + Filter the mutect2 output vcf using filtermutectcalls and the contamination & segmentation tables for additional filtering. +keywords: + - gatk4 + - mutect2 + - getpileupsummaries + - calculatecontamination + - filtermutectcalls + - variant_calling + - tumor_only + - filtered_vcf +modules: + - gatk4/mutect2 + - gatk4/getpileupsummaries + - gatk4/calculatecontamination + - gatk4/filtermutectcalls +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input: + type: list + description: list containing one BAM file, also able to take CRAM as an input + pattern: "[ *.{bam/cram} ]" + - input_index: + type: list + description: list containing one BAM file index, also able to take CRAM index as an input + pattern: "[ *.{bam.bai/cram.crai} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - germline_resource: + type: file + description: Population vcf of germline sequencing, containing allele fractions. + pattern: "*.vcf.gz" + - germline_resource_tbi: + type: file + description: Index file for the germline resource. + pattern: "*.vcf.gz.tbi" + - panel_of_normals: + type: file + description: vcf file to be used as a panel of normals. + pattern: "*.vcf.gz" + - panel_of_normals_tbi: + type: file + description: Index for the panel of normals. + pattern: "*.vcf.gz.tbi" + - interval_file: + type: file + description: File containing intervals. + pattern: "*.interval_list" +output: + - versions: + type: file + description: File containing software versions + pattern: 'versions.yml' + - mutect2_vcf: + type: file + description: Compressed vcf file to be used for variant_calling.
+ pattern: "[ *.vcf.gz ]" + - mutect2_tbi: + type: file + description: Indexes of the mutect2_vcf file + pattern: "[ *vcf.gz.tbi ]" + - mutect2_stats: + type: file + description: Stats files for the mutect2 vcf + pattern: "[ *vcf.gz.stats ]" + - pileup_table: + type: file + description: File containing the pileup summary table. + pattern: "*.pileups.table" + - contamination_table: + type: file + description: File containing the contamination table. + pattern: "*.contamination.table" + - segmentation_table: + type: file + description: Output table containing segmentation of tumor minor allele fractions. + pattern: "*.segmentation.table" + - filtered_vcf: + type: file + description: file containing filtered mutect2 calls. + pattern: "*.vcf.gz" + - filtered_tbi: + type: file + description: tbi file that pairs with filtered vcf. + pattern: "*.vcf.gz.tbi" + - filtered_stats: + type: file + description: file containing statistics of the filtermutectcalls run. + pattern: "*.filteringStats.tsv" +authors: + - '@GCJMackenzie' diff --git a/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config new file mode 100644 index 00000000..af50c2b0 --- /dev/null +++ b/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/nextflow.config @@ -0,0 +1,4 @@ +params.mutect2_options = [:] +params.getpileup_options = [:] +params.calccontam_options = [:] +params.filtercalls_options = [:] diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index cd22dde8..741edf5e 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -6,9 +6,9 @@ params { } process { - cpus = 2 - memory = 3.GB - time = 2.h + cpus = 2 + memory = 3.GB + time = 2.h } if ("$PROFILE" == "singularity") { @@ -28,5 +28,5 @@ conda { createTimeout = "120 min" } includeConfig 'test_data.config' manifest { - nextflowVersion = '!>=21.04.0' + nextflowVersion = '!>=21.10.3' } diff --git 
a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 008c98dc..7601671b 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -34,6 +34,14 @@ assemblyscan: - modules/assemblyscan/** - tests/modules/assemblyscan/** +ataqv/ataqv: + - modules/ataqv/ataqv/** + - tests/modules/ataqv/ataqv/** + +bakta: + - modules/bakta/** + - tests/modules/bakta/** + bamaligncleaner: - modules/bamaligncleaner/** - tests/modules/bamaligncleaner/** @@ -42,6 +50,10 @@ bamtools/split: - modules/bamtools/split/** - tests/modules/bamtools/split/** +bamutil/trimbam: + - modules/bamutil/trimbam/** + - tests/modules/bamutil/trimbam/** + bandage/image: - modules/bandage/image/** - tests/modules/bandage/image/** @@ -258,9 +270,25 @@ cat/fastq: - modules/cat/fastq/** - tests/modules/cat/fastq/** -cellranger/mkref: +cellranger/gtf: # &cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/mkref: # &cellranger/mkref - modules/cellranger/mkref/** - tests/modules/cellranger/mkref/** + # - *cellranger/gtf + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** + +cellranger/count: + - modules/cellranger/count/** + - tests/modules/cellranger/count/** + # - *cellranger/mkref + - modules/cellranger/mkref/** + - tests/modules/cellranger/mkref/** + - modules/cellranger/gtf/** + - tests/modules/cellranger/gtf/** checkm/lineagewf: - modules/checkm/lineagewf/** @@ -274,9 +302,21 @@ chromap/index: - modules/chromap/index/** - tests/modules/chromap/index/** -cnvkit: - - modules/cnvkit/** - - tests/modules/cnvkit/** +clonalframeml: + - modules/clonalframeml/** + - tests/modules/clonalframeml/** + +cmseq/polymut: + - modules/cmseq/polymut/** + - tests/modules/cmseq/polymut/** + +cnvkit/batch: + - modules/cnvkit/batch/** + - tests/modules/cnvkit/batch/** + +cooler/cload: + - modules/cooler/cload/** + - tests/modules/cooler/cload/** cooler/digest: - modules/cooler/digest/** @@ -286,14 +326,30 @@ cooler/dump: - 
modules/cooler/dump/** - tests/modules/cooler/dump/** +cooler/merge: + - modules/cooler/merge/** + - tests/modules/cooler/merge/** + +cooler/zoomify: + - modules/cooler/zoomify/** + - tests/software/cooler/zoomify/** + csvtk/concat: - modules/csvtk/concat/** - tests/modules/csvtk/concat/** +csvtk/split: + - modules/csvtk/split/** + - tests/modules/csvtk/split/** + custom/dumpsoftwareversions: - modules/custom/dumpsoftwareversions/** - tests/modules/custom/dumpsoftwareversions/** +custom/getchromsizes: + - modules/custom/getchromsizes/** + - tests/modules/custom/getchromsizes/** + cutadapt: - modules/cutadapt/** - tests/modules/cutadapt/** @@ -302,6 +358,14 @@ damageprofiler: - modules/damageprofiler/** - tests/modules/damageprofiler/** +dastool/dastool: + - modules/dastool/dastool/** + - tests/modules/dastool/dastool/** + +dastool/scaffolds2bin: + - modules/dastool/scaffolds2bin/** + - tests/modules/dastool/scaffolds2bin/** + dedup: - modules/dedup/** - tests/modules/dedup/** @@ -338,6 +402,14 @@ diamond/makedb: - modules/diamond/makedb/** - tests/modules/diamond/makedb/** +dragmap/align: + - modules/dragmap/align/** + - tests/modules/dragmap/align/** + +dragmap/hashtable: + - modules/dragmap/hashtable/** + - tests/modules/dragmap/hashtable/** + dragonflye: - modules/dragonflye/** - tests/modules/dragonflye/** @@ -362,6 +434,14 @@ dshbio/splitgff3: - modules/dshbio/splitgff3/** - tests/modules/dshbio/splitgff3/** +ectyper: + - modules/ectyper/** + - tests/modules/ectyper/** + +emmtyper: + - modules/emmtyper/** + - tests/modules/emmtyper/** + ensemblvep: - modules/ensemblvep/** - tests/modules/ensemblvep/** @@ -370,6 +450,10 @@ expansionhunter: - modules/expansionhunter/** - tests/modules/expansionhunter/** +fargene: + - modules/fargene/** + - tests/modules/fargene/** + fastani: - modules/fastani/** - tests/modules/fastani/** @@ -382,6 +466,10 @@ fastqc: - modules/fastqc/** - tests/modules/fastqc/** +fastqscan: + - modules/fastqscan/** + - tests/modules/fastqscan/** 
+ fasttree: - modules/fasttree/** - tests/modules/fasttree/** @@ -426,7 +514,7 @@ gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** -gatk4/calculatecontamination: +gatk4/calculatecontamination: #&gatk4_calculatecontamination - modules/gatk4/calculatecontamination/** - tests/modules/gatk4/calculatecontamination/** @@ -434,7 +522,7 @@ gatk4/createsequencedictionary: - modules/gatk4/createsequencedictionary/** - tests/modules/gatk4/createsequencedictionary/** -gatk4/createsomaticpanelofnormals: +gatk4/createsomaticpanelofnormals: #&gatk4_createsomaticpanelofnormals - modules/gatk4/createsomaticpanelofnormals/** - tests/modules/gatk4/createsomaticpanelofnormals/** @@ -446,15 +534,23 @@ gatk4/fastqtosam: - modules/gatk4/fastqtosam/** - tests/modules/gatk4/fastqtosam/** -gatk4/filtermutectcalls: +gatk4/filtermutectcalls: #&gatk4_filtermutectcalls - modules/gatk4/filtermutectcalls/** - tests/modules/gatk4/filtermutectcalls/** -gatk4/genomicsdbimport: +gatk4/gatherbqsrreports: + - modules/gatk4/gatherbqsrreports/** + - tests/modules/gatk4/gatherbqsrreports/** + +gatk4/genomicsdbimport: #&gatk4_genomicsdbimport - modules/gatk4/genomicsdbimport/** - tests/modules/gatk4/genomicsdbimport/** -gatk4/getpileupsummaries: +gatk4/genotypegvcfs: + - modules/gatk4/genotypegvcfs/** + - tests/modules/gatk4/genotypegvcfs/** + +gatk4/getpileupsummaries: #&gatk4_getpileupsummaries - modules/gatk4/getpileupsummaries/** - tests/modules/gatk4/getpileupsummaries/** @@ -462,11 +558,15 @@ gatk4/haplotypecaller: - modules/gatk4/haplotypecaller/** - tests/modules/gatk4/haplotypecaller/** +gatk4/indexfeaturefile: + - modules/gatk4/indexfeaturefile/** + - tests/modules/gatk4/indexfeaturefile/** + gatk4/intervallisttools: - modules/gatk4/intervallisttools/** - tests/modules/gatk4/intervallisttools/** -gatk4/learnreadorientationmodel: +gatk4/learnreadorientationmodel: #&gatk4_learnreadorientationmodel - modules/gatk4/learnreadorientationmodel/** - 
tests/modules/gatk4/learnreadorientationmodel/** @@ -482,7 +582,7 @@ gatk4/mergevcfs: - modules/gatk4/mergevcfs/** - tests/modules/gatk4/mergevcfs/** -gatk4/mutect2: +gatk4/mutect2: #&gatk4_mutect2 - modules/gatk4/mutect2/** - tests/modules/gatk4/mutect2/** @@ -613,6 +713,10 @@ idr: - modules/idr/** - tests/modules/idr/** +imputeme/vcftoprs: + - modules/imputeme/vcftoprs/** + - tests/modules/imputeme/vcftoprs/** + iqtree: - modules/iqtree/** - tests/modules/iqtree/** @@ -670,6 +774,14 @@ kraken2/kraken2: - modules/untar/** - tests/modules/kraken2/kraken2/** +krona/kronadb: + - modules/krona/kronadb/** + - tests/modules/krona/kronadb/** + +krona/ktimporttaxonomy: + - modules/krona/ktimporttaxonomy/** + - tests/modules/krona/ktimporttaxonomy/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** @@ -702,10 +814,18 @@ last/train: - modules/last/train/** - tests/modules/last/train/** +leehom: + - modules/leehom/** + - tests/modules/leehom/** + lima: - modules/lima/** - tests/modules/lima/** +lissero: + - modules/lissero/** + - tests/modules/lissero/** + lofreq/call: - modules/lofreq/call/** - tests/modules/lofreq/call/** @@ -722,6 +842,14 @@ lofreq/indelqual: - modules/lofreq/indelqual/** - tests/modules/lofreq/indelqual/** +macrel/contigs: + - modules/macrel/contigs/** + - tests/modules/macrel/contigs/** + +macs2/callpeak: + - modules/macs2/callpeak/** + - tests/modules/macs2/callpeak/** + malt/build: - modules/malt/build/** - tests/modules/malt/build_test/** @@ -746,6 +874,10 @@ manta/tumoronly: - modules/manta/tumoronly/** - tests/modules/manta/tumoronly/** +mapdamage2: + - modules/mapdamage2/** + - tests/modules/mapdamage2/** + mash/sketch: - modules/mash/sketch/** - tests/modules/mash/sketch/** @@ -758,10 +890,26 @@ maxbin2: - modules/maxbin2/** - tests/modules/maxbin2/** +medaka: + - modules/medaka/** + - tests/modules/medaka/** + megahit: - modules/megahit/** - tests/modules/megahit/** +meningotype: + - modules/meningotype/** + - 
tests/modules/meningotype/** + +metabat2/jgisummarizebamcontigdepths: + - modules/metabat2/jgisummarizebamcontigdepths/** + - tests/modules/metabat2/jgisummarizebamcontigdepths/** + +metabat2/metabat2: + - modules/metabat2/metabat2/** + - tests/modules/metabat2/metabat2/** + metaphlan3: - modules/metaphlan3/** - tests/modules/metaphlan3/** @@ -778,6 +926,10 @@ minia: - modules/minia/** - tests/modules/minia/** +miniasm: + - modules/miniasm/** + - tests/modules/miniasm/** + minimap2/align: - modules/minimap2/align/** - tests/modules/minimap2/align/** @@ -811,6 +963,10 @@ multiqc: - modules/multiqc/** - tests/modules/multiqc/** +mummer: + - modules/mummer/** + - tests/modules/mummer/** + muscle: - modules/muscle/** - tests/modules/muscle/** @@ -823,10 +979,22 @@ nanoplot: - modules/nanoplot/** - tests/modules/nanoplot/** +ncbigenomedownload: + - modules/ncbigenomedownload/** + - tests/modules/ncbigenomedownload/** + nextclade: - modules/nextclade/** - tests/modules/nextclade/** +ngmaster: + - modules/ngmaster/** + - tests/modules/ngmaster/** + +nucmer: + - modules/nucmer/** + - tests/modules/nucmer/** + optitype: - modules/optitype/** - tests/modules/optitype/** @@ -875,6 +1043,18 @@ pbccs: - modules/pbccs/** - tests/modules/pbccs/** +peddy: + - modules/peddy/** + - tests/modules/peddy/** + +phyloflash: + - modules/phyloflash/** + - tests/modules/phyloflash/** + +picard/collecthsmetrics: + - modules/picard/collecthsmetrics/** + - tests/modules/picard/collecthsmetrics/** + picard/collectmultiplemetrics: - modules/picard/collectmultiplemetrics/** - tests/modules/picard/collectmultiplemetrics/** @@ -907,10 +1087,22 @@ plasmidid: - modules/plasmidid/** - tests/modules/plasmidid/** +plink/extract: + - modules/plink/extract/** + - tests/modules/plink/extract/** + plink/vcf: - modules/plink/vcf/** - tests/modules/plink/vcf/** +plink2/vcf: + - modules/plink2/vcf/** + - tests/modules/plink2/vcf/** + +pmdtools/filter: + - modules/pmdtools/filter/** + - 
tests/modules/pmdtools/filter/** + porechop: - modules/porechop/** - tests/modules/porechop/** @@ -1019,6 +1211,10 @@ salmon/quant: - modules/salmon/quant/** - tests/modules/salmon/quant/** +samblaster: + - modules/samblaster/** + - tests/modules/samblaster/** + samtools/ampliconclip: - modules/samtools/ampliconclip/** - tests/modules/samtools/ampliconclip/** @@ -1051,7 +1247,7 @@ samtools/idxstats: - modules/samtools/idxstats/** - tests/modules/samtools/idxstats/** -samtools/index: +samtools/index: #&samtools_index - modules/samtools/index/** - tests/modules/samtools/index/** @@ -1063,7 +1259,7 @@ samtools/mpileup: - modules/samtools/mpileup/** - tests/modules/samtools/mpileup/** -samtools/sort: +samtools/sort: #&samtools_sort - modules/samtools/sort/** - tests/modules/samtools/sort/** @@ -1075,6 +1271,10 @@ samtools/view: - modules/samtools/view/** - tests/modules/samtools/view/** +scoary: + - modules/scoary/** + - tests/modules/scoary/** + seacr/callpeak: - modules/seacr/callpeak/** - tests/modules/seacr/callpeak/** @@ -1135,11 +1335,11 @@ spatyper: - modules/spatyper/** - tests/modules/spatyper/** -sratools/fasterqdump: +sratools/fasterqdump: #&sratools_fasterqdump - modules/sratools/fasterqdump/** - tests/modules/sratools/fasterqdump/** -sratools/prefetch: +sratools/prefetch: #&sratools_prefetch - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** @@ -1187,6 +1387,10 @@ tabix/tabix: - modules/tabix/tabix/** - tests/modules/tabix/tabix/** +tbprofiler/profile: + - modules/tbprofiler/profile/** + - tests/modules/tbprofiler/profile/** + tiddit/cov: - modules/tiddit/cov/** - tests/modules/tiddit/cov/** @@ -1223,6 +1427,10 @@ ucsc/wigtobigwig: - modules/ucsc/wigtobigwig/** - tests/modules/ucsc/wigtobigwig/** +ultra/pipeline: + - modules/ultra/pipeline/** + - tests/modules/ultra/pipeline/** + unicycler: - modules/unicycler/** - tests/modules/unicycler/** @@ -1250,3 +1458,56 @@ yara/index: yara/mapper: - modules/yara/mapper/** - 
tests/modules/yara/mapper/** + +# subworkflows/align_bowtie2: +# - subworkflows/nf-core/align_bowtie2/** +# - tests/subworkflows/nf-core/align_bowtie2/** +# - *subworkflows_bam_sort_samtools + +# subworkflows/annotation_ensemblvep: &subworkflows_annotation_ensemblvep +# - subworkflows/nf-core/annotation_ensemblvep/** +# - tests/subworkflows/nf-core/annotation_ensemblvep/** + +# subworkflows/annotation_snpeff: &subworkflows_annotation_snpeff +# - subworkflows/nf-core/annotation_snpeff/** +# - tests/subworkflows/nf-core/annotation_snpeff/** + +# subworkflows/bam_stats_samtools: &subworkflows_bam_stats_samtools +# - subworkflows/nf-core/bam_stats_samtools/** +# - tests/subworkflows/nf-core/bam_stats_samtools/** + +# subworkflows/bam_sort_samtools: &subworkflows_bam_sort_samtools +# - subworkflows/nf-core/bam_sort_samtools/** +# - tests/subworkflows/nf-core/bam_sort_samtools/** +# - *samtools_sort +# - *samtools_index +# - *subworkflows_bam_stats_samtools + +# subworkflows/gatk_create_som_pon: +# - subworkflows/nf-core/gatk_create_som_pon/** +# - tests/subworkflows/nf-core/gatk_create_som_pon/** +# - *gatk4_genomicsdbimport +# - *gatk4_createsomaticpanelofnormals + +# subworkflows/gatk_tumor_normal_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_normal_somatic_variant_calling/** +# - *gatk4_mutect2 +# - *gatk4_learnreadorientationmodel +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls + +# subworkflows/gatk_tumor_only_somatic_variant_calling: +# - subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - tests/subworkflows/nf-core/gatk_tumor_only_somatic_variant_calling/** +# - *gatk4_mutect2 +# - *gatk4_getpileupsummaries +# - *gatk4_calculatecontamination +# - *gatk4_filtermutectcalls + +# subworkflows/sra_fastq: +# - subworkflows/nf-core/sra_fastq/** +# - tests/subworkflows/nf-core/sra_fastq/** +# - *sratools_fasterqdump 
+# - *sratools_prefetch diff --git a/tests/config/pytest_subworkflows.yml b/tests/config/pytest_subworkflows.yml deleted file mode 100644 index 84919be8..00000000 --- a/tests/config/pytest_subworkflows.yml +++ /dev/null @@ -1,16 +0,0 @@ -subworkflows/align_bowtie2: - - subworkflows/nf-core/align_bowtie2/** - - tests/subworkflows/nf-core/align_bowtie2/** - -subworkflows/bam_stats_samtools: - - subworkflows/nf-core/bam_stats_samtools/** - - tests/subworkflows/nf-core/bam_stats_samtools/** - -subworkflows/bam_sort_samtools: - - subworkflows/nf-core/bam_sort_samtools/** - - tests/subworkflows/nf-core/bam_sort_samtools/** - -subworkflows/sra_fastq: - - subworkflows/nf-core/sra_fastq/** - - tests/subworkflows/nf-core/sra_fastq/** - diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 12252542..31e17618 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -34,6 +34,9 @@ params { contigs_genome_maf_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.maf.gz" contigs_genome_par = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/contigs.genome.par" lastdb_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/alignment/last/lastdb.tar.gz" + + baits_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/baits.interval_list" + targets_interval_list = "${test_data_dir}/genomics/sarscov2/genome/picard/targets.interval_list" } 'illumina' { test_single_end_bam = "${test_data_dir}/genomics/sarscov2/illumina/bam/test.single_end.bam" @@ -119,8 +122,16 @@ params { gnomad_r2_1_1_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz" mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" + + syntheticvcf_short_vcf_gz = 
"${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" + syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" + index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" + justhusky_ped = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky.ped" + justhusky_minimal_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz" + justhusky_minimal_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz.tbi" + } 'illumina' { test_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam" @@ -184,6 +195,8 @@ params { test2_pileups_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.pileups.table" test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_genomicsdb_tar_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test_genomicsdb.tar.gz" + test_test2_paired_mutect2_calls_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz" test_test2_paired_mutect2_calls_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.tbi" test_test2_paired_mutect2_calls_vcf_gz_stats = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/paired_mutect2_calls/test_test2_paired_mutect2_calls.vcf.gz.stats" @@ -208,8 +221,8 @@ params { test_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test.narrowPeak" test2_narrowpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/narrowpeak/test2.narrowPeak" - test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_1.fastq.gz" - test_10x_2_fastq_gz = 
"${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test.10x_2.fastq.gz" + test_10x_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R1_001.fastq.gz" + test_10x_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/10xgenomics/test_10x_S1_L001_R2_001.fastq.gz" test_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test.yak" test2_yak = "${test_data_dir}/genomics/homo_sapiens/illumina/yak/test2.yak" @@ -239,32 +252,85 @@ params { filelist = "${test_data_dir}/genomics/homo_sapiens/pacbio/txt/filelist.txt" } } + 'bacteroides_fragilis' { + 'genome' { + genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz" + genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf" + } + 'illumina' { + test1_contigs_fa_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" + test1_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" + test1_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" + test2_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" + test2_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" + test1_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.bam" + test1_paired_end_sorted_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam" + test1_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test1.sorted.bam.bai" + test2_paired_end_bam = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.bam" + test2_paired_end_sorted_bam = 
"${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam" + test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/bam/test2.sorted.bam.bai" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" + overlap_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/nanopore/overlap.paf" + } + } + 'candidatus_portiera_aleyrodidarum' { + 'genome' { + genome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.fasta" + genome_sizes = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.sizes" + genome_aln_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.aln.nwk" + proteome_fasta = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/proteome.fasta" + test1_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test1.gff" + test2_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test2.gff" + test3_gff = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/gff/test3.gff" + } + 'illumina' { + test_1_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fasta/test_1.fastq.gz" + test_2_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_2.fastq.gz" + test_se_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/illumina/fastq/test_se.fastq.gz" + } + 'nanopore' { + test_fastq_gz = "${test_data_dir}/genomics/prokaryotes/candidatus_portiera_aleyrodidarum/nanopore/fastq/test.fastq.gz" + } + } + 'haemophilus_influenzae' { + 'genome' { + 
genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.fna.gz" + genome_aln_gz = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.gz" + genome_aln_nwk = "${test_data_dir}/genomics/prokaryotes/haemophilus_influenzae/genome/genome.aln.nwk" + } + } 'generic' { + 'csv' { + test_csv = "${test_data_dir}/generic/csv/test.csv" + } 'notebooks' { rmarkdown = "${test_data_dir}/generic/notebooks/rmarkdown/rmarkdown_notebook.Rmd" ipython_md = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.md" ipython_ipynb = "${test_data_dir}/generic/notebooks/jupyter/ipython_notebook.ipynb" } + 'tsv' { + test_tsv = "${test_data_dir}/generic/tsv/test.tsv" + } 'txt' { hello = "${test_data_dir}/generic/txt/hello.txt" } + 'cnn' { + reference = "${test_data_dir}/generic/cnn/reference.cnn" + } + 'cooler'{ + test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" + test_pairix_pair_gz_px2 = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz.px2" + test_pairs_pair = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.sample1.pairs" + test_tabix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz" + test_tabix_pair_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.sorted.possrt.txt.gz.tbi" + hg19_chrom_sizes = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.chrom.sizes" + test_merge_cool = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cool" + test_merge_cool_cp2 = "${test_data_dir}/genomics/homo_sapiens/cooler/merge/toy/toy.symm.upper.2.cp2.cool" + + } } - 'bacteroides_fragilis'{ - 'genome' { - genome_fna_gz = "${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.fna.gz" - genome_paf = 
"${test_data_dir}/genomics/bacteroides_fragilis/genome/genome.paf" - } - 'illumina' { - test1_contigs_fa_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" - test1_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_1.fastq.gz" - test1_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test1_2.fastq.gz" - test2_1_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_1.fastq.gz" - test2_2_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/illumina/fastq/test2_2.fastq.gz" - } - 'nanopore' { - test_fastq_gz = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/fastq/test.fastq.gz" - overlap_paf = "${test_data_dir}/genomics/bacteroides_fragilis/nanopore/overlap.paf" - } - } } } diff --git a/tests/modules/abacas/main.nf b/tests/modules/abacas/main.nf index dc58ed61..542a67af 100644 --- a/tests/modules/abacas/main.nf +++ b/tests/modules/abacas/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ABACAS } from '../../../modules/abacas/main.nf' addParams ( options: ['args' : '-m -p nucmer'] ) +include { ABACAS } from '../../../modules/abacas/main.nf' workflow test_abacas { diff --git a/tests/modules/abacas/nextflow.config b/tests/modules/abacas/nextflow.config new file mode 100644 index 00000000..17296503 --- /dev/null +++ b/tests/modules/abacas/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ABACAS { + ext.args = '-m -p nucmer' + } + +} diff --git a/tests/modules/abacas/test.yml b/tests/modules/abacas/test.yml index 899bc4db..c466a6ed 100644 --- a/tests/modules/abacas/test.yml +++ b/tests/modules/abacas/test.yml @@ -1,5 +1,5 @@ - name: abacas - command: nextflow run ./tests/modules/abacas -entry test_abacas -c tests/config/nextflow.config + command: nextflow run ./tests/modules/abacas -entry test_abacas -c 
./tests/config/nextflow.config -c ./tests/modules/abacas/nextflow.config tags: - abacas files: diff --git a/tests/modules/adapterremoval/main.nf b/tests/modules/adapterremoval/main.nf index 9dd37aa9..ee7f1c44 100644 --- a/tests/modules/adapterremoval/main.nf +++ b/tests/modules/adapterremoval/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' addParams( options: [:] ) +include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' workflow test_adapterremoval_single_end { input = [ [ id:'test', single_end:true, collapse:false ], // meta map diff --git a/tests/modules/adapterremoval/nextflow.config b/tests/modules/adapterremoval/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/adapterremoval/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 95cd4b04..a6c4a6cf 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -1,5 +1,5 @@ - name: adapterremoval test_adapterremoval_single_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_single_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: @@ -9,23 +9,23 @@ md5sum: 62139afee94defad5b83bdd0b8475a1f - name: adapterremoval test_adapterremoval_paired_end - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c ./tests/config/nextflow.config -c 
./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: - - path: output/adapterremoval/test.pair2.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 - path: output/adapterremoval/test.log - md5sum: bea86105aff4d27fe29c83e24498fefa + md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 - path: output/adapterremoval/test.pair1.trimmed.fastq.gz - md5sum: f076a9f666235e01a3281f8c46c9d010 + md5sum: 294a6277f0139bd597e57c6fa31f39c7 + - path: output/adapterremoval/test.pair2.trimmed.fastq.gz + md5sum: de7b38e2c881bced8671acb1ab452d78 - name: adapterremoval test_adapterremoval_paired_end_collapse - command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config tags: - adapterremoval files: - path: output/adapterremoval/test.log - md5sum: 97cb97b3d03123ac88430768b2e36c59 + md5sum: 7f0b2328152226e46101a535cce718b3 - path: output/adapterremoval/test.merged.fastq.gz - md5sum: 50a4f9fdac6a24e211eb4dcf9f292bef + md5sum: 07a8f725bfd3ecbeabdc41b32d898dee diff --git a/tests/modules/agrvate/main.nf b/tests/modules/agrvate/main.nf index 58058fe3..ac682bef 100644 --- a/tests/modules/agrvate/main.nf +++ b/tests/modules/agrvate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { AGRVATE } from '../../../modules/agrvate/main.nf' addParams( options: ["args": "--mummer"] ) +include { AGRVATE } from '../../../modules/agrvate/main.nf' workflow test_agrvate { diff --git a/tests/modules/agrvate/nextflow.config b/tests/modules/agrvate/nextflow.config new file mode 100644 index 00000000..7f127e5e --- /dev/null +++ b/tests/modules/agrvate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: AGRVATE { + ext.args = 
'--mummer' + } + +} diff --git a/tests/modules/agrvate/test.yml b/tests/modules/agrvate/test.yml index ec413663..36e8886c 100644 --- a/tests/modules/agrvate/test.yml +++ b/tests/modules/agrvate/test.yml @@ -1,5 +1,5 @@ - name: agrvate - command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/agrvate -entry test_agrvate -c ./tests/config/nextflow.config -c ./tests/modules/agrvate/nextflow.config tags: - agrvate files: diff --git a/tests/modules/allelecounter/main.nf b/tests/modules/allelecounter/main.nf index b938ab94..3fe11be3 100644 --- a/tests/modules/allelecounter/main.nf +++ b/tests/modules/allelecounter/main.nf @@ -1,7 +1,7 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' addParams( options: [:] ) +include { ALLELECOUNTER } from '../../../modules/allelecounter/main.nf' workflow test_allelecounter_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/allelecounter/nextflow.config b/tests/modules/allelecounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/allelecounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/allelecounter/test.yml b/tests/modules/allelecounter/test.yml index bbef0ecc..a0afbc12 100644 --- a/tests/modules/allelecounter/test.yml +++ b/tests/modules/allelecounter/test.yml @@ -1,5 +1,5 @@ - name: allelecounter test_allelecounter_bam - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_bam -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: @@ -7,7 +7,7 @@ md5sum: 
2bbe9d7331b78bdac30fe30dbc5fdaf3 - name: allelecounter test_allelecounter_cram - command: nextflow run tests/modules/allelecounter -entry test_allelecounter_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/allelecounter -entry test_allelecounter_cram -c ./tests/config/nextflow.config -c ./tests/modules/allelecounter/nextflow.config tags: - allelecounter files: diff --git a/tests/modules/amps/main.nf b/tests/modules/amps/main.nf index 7d7a40d1..15572096 100644 --- a/tests/modules/amps/main.nf +++ b/tests/modules/amps/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( options: [args: "-f def_anc"] ) -include { AMPS } from '../../../modules/amps/main.nf' addParams( options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' +include { AMPS } from '../../../modules/amps/main.nf' workflow test_amps { diff --git a/tests/modules/amps/nextflow.config b/tests/modules/amps/nextflow.config new file mode 100644 index 00000000..b58ac3fe --- /dev/null +++ b/tests/modules/amps/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MALTEXTRACT { + ext.args = '-f def_anc' + } + +} diff --git 
a/tests/modules/amps/test.yml b/tests/modules/amps/test.yml index 04691f18..f38320e4 100644 --- a/tests/modules/amps/test.yml +++ b/tests/modules/amps/test.yml @@ -1,5 +1,5 @@ - name: amps - command: nextflow run ./tests/modules/amps -entry test_amps -c tests/config/nextflow.config + command: nextflow run ./tests/modules/amps -entry test_amps -c ./tests/config/nextflow.config -c ./tests/modules/amps/nextflow.config tags: - amps files: diff --git a/tests/modules/arriba/main.nf b/tests/modules/arriba/main.nf index 833742d6..60741275 100644 --- a/tests/modules/arriba/main.nf +++ b/tests/modules/arriba/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 11'] ) -include { STAR_ALIGN } from '../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'] ) -include { ARRIBA } from '../../../modules/arriba/main.nf' addParams( options: [:] ) +include { STAR_GENOMEGENERATE } from '../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../modules/star/align/main.nf' +include { ARRIBA } from '../../../modules/arriba/main.nf' workflow test_arriba_single_end { @@ -14,9 +14,12 @@ workflow test_arriba_single_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + 
star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } @@ -29,8 +32,11 @@ workflow test_arriba_paired_end { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) ARRIBA ( STAR_ALIGN.out.bam, fasta, gtf ) } diff --git a/tests/modules/arriba/nextflow.config b/tests/modules/arriba/nextflow.config new file mode 100644 index 00000000..1b66d8df --- /dev/null +++ b/tests/modules/arriba/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 11' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + +} diff --git a/tests/modules/arriba/test.yml b/tests/modules/arriba/test.yml index c1dc7c1e..52743167 100644 --- 
a/tests/modules/arriba/test.yml +++ b/tests/modules/arriba/test.yml @@ -1,5 +1,5 @@ - name: arriba test_arriba_single_end - command: nextflow run tests/modules/arriba -entry test_arriba_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_single_end -c ./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -7,46 +7,45 @@ md5sum: cad8c215b938d1e45b747a5b7898a4c2 - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: 
output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: 29c99195dcc79ff4df1f754ff16aac78 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out - path: output/star/test.SJ.out.tab - name: arriba test_arriba_paired_end - command: nextflow run tests/modules/arriba -entry test_arriba_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/arriba -entry test_arriba_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/arriba/nextflow.config tags: - arriba files: @@ -54,39 +53,38 @@ md5sum: 85e36c887464e4deaa65f45174d3b8fd - path: output/arriba/test.fusions.tsv md5sum: 7c3383f7eb6d79b84b0bd30a7ef02d70 - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 9f085c626553b1c52f2827421972ac10 - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: 
output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 9e42067b1ec70b773257529230dd7b3a - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam - md5sum: d724ca90a102347b9c5052a33ea4d308 - path: output/star/test.Log.final.out - path: output/star/test.Log.out - path: output/star/test.Log.progress.out diff --git a/tests/modules/artic/guppyplex/main.nf b/tests/modules/artic/guppyplex/main.nf index 972a6e66..89f67c74 100644 --- a/tests/modules/artic/guppyplex/main.nf +++ b/tests/modules/artic/guppyplex/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' addParams( options: [:] ) +include { ARTIC_GUPPYPLEX } from '../../../../modules/artic/guppyplex/main.nf' process STAGE_FASTQ_DIR { input: diff --git a/tests/modules/artic/guppyplex/nextflow.config b/tests/modules/artic/guppyplex/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/artic/guppyplex/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/guppyplex/test.yml b/tests/modules/artic/guppyplex/test.yml index 133f0b15..6fd10898 100644 --- a/tests/modules/artic/guppyplex/test.yml +++ b/tests/modules/artic/guppyplex/test.yml @@ -1,5 +1,5 @@ - name: artic guppyplex - command: nextflow run tests/modules/artic/guppyplex -entry test_artic_guppyplex -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/guppyplex -entry test_artic_guppyplex -c ./tests/config/nextflow.config -c ./tests/modules/artic/guppyplex/nextflow.config tags: - artic - artic/guppyplex diff --git a/tests/modules/artic/minion/main.nf b/tests/modules/artic/minion/main.nf index f4993289..ca66ede0 100644 --- a/tests/modules/artic/minion/main.nf +++ b/tests/modules/artic/minion/main.nf @@ -3,17 +3,19 @@ nextflow.enable.dsl = 2 include { UNTAR } from '../../../../modules/untar/main.nf' -include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' addParams( fast5_dir: true, sequencing_summary: true, artic_minion_medaka_model:false ) +include { ARTIC_MINION } from '../../../../modules/artic/minion/main.nf' workflow test_artic_minion { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - fast5_tar = [ file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) ] - sequencing_summary = [ file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) ] - fasta = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) ] - bed = [ file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) ] - dummy_file = [ ] + input = [ + [ 
id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + fast5_tar = file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) + sequencing_summary = file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) + fasta = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) + bed = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) + dummy_file = [] fast5_dir = UNTAR ( fast5_tar ).untar diff --git a/tests/modules/artic/minion/nextflow.config b/tests/modules/artic/minion/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/artic/minion/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/artic/minion/test.yml b/tests/modules/artic/minion/test.yml index b3c5f0f1..8b36b224 100644 --- a/tests/modules/artic/minion/test.yml +++ b/tests/modules/artic/minion/test.yml @@ -1,5 +1,5 @@ - name: artic minion - command: nextflow run tests/modules/artic/minion -entry test_artic_minion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/artic/minion -entry test_artic_minion -c ./tests/config/nextflow.config -c ./tests/modules/artic/minion/nextflow.config tags: - artic - artic/minion diff --git a/tests/modules/assemblyscan/main.nf b/tests/modules/assemblyscan/main.nf index 6f3cbb5e..7cd5f393 100644 --- a/tests/modules/assemblyscan/main.nf +++ b/tests/modules/assemblyscan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' addParams( options: [:] ) +include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' workflow test_assemblyscan { diff 
--git a/tests/modules/assemblyscan/nextflow.config b/tests/modules/assemblyscan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/assemblyscan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/assemblyscan/test.yml b/tests/modules/assemblyscan/test.yml index 0eb4ad66..4a3ba5ec 100644 --- a/tests/modules/assemblyscan/test.yml +++ b/tests/modules/assemblyscan/test.yml @@ -1,5 +1,5 @@ - name: assemblyscan test_assemblyscan - command: nextflow run tests/modules/assemblyscan -entry test_assemblyscan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/assemblyscan -entry test_assemblyscan -c ./tests/config/nextflow.config -c ./tests/modules/assemblyscan/nextflow.config tags: - assemblyscan files: diff --git a/tests/modules/ataqv/ataqv/main.nf b/tests/modules/ataqv/ataqv/main.nf new file mode 100644 index 00000000..b1103350 --- /dev/null +++ b/tests/modules/ataqv/ataqv/main.nf @@ -0,0 +1,69 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ATAQV_ATAQV } from '../../../../modules/ataqv/ataqv/main.nf' +include { ATAQV_ATAQV as ATAQV_ATAQV_PROBLEM_READS} from '../../../../modules/ataqv/ataqv/main.nf' + +workflow test_ataqv_ataqv { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_problem_reads { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + ATAQV_ATAQV_PROBLEM_READS ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_peak { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], 
checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + + ATAQV_ATAQV ( input, 'human', [], [], [] ) +} + +workflow test_ataqv_ataqv_tss { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, [], [] ) +} + +workflow test_ataqv_ataqv_excluded_regs { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + tss_file = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + excl_regs_file = file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true) + + ATAQV_ATAQV ( input, 'human', tss_file, excl_regs_file, [] ) +} diff --git a/tests/modules/ataqv/ataqv/nextflow.config b/tests/modules/ataqv/ataqv/nextflow.config new file mode 100644 index 00000000..31700510 --- /dev/null +++ b/tests/modules/ataqv/ataqv/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ATAQV_ATAQV_PROBLEM_READS { + ext.args = '--log-problematic-reads' + } + +} diff --git a/tests/modules/ataqv/ataqv/test.yml b/tests/modules/ataqv/ataqv/test.yml new file mode 100644 index 00000000..f9f2a888 --- /dev/null +++ b/tests/modules/ataqv/ataqv/test.yml @@ -0,0 +1,51 @@ +- name: ataqv ataqv test_ataqv_ataqv + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv -c ./tests/config/nextflow.config -c 
./tests/modules/ataqv/ataqv/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_problem_reads + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_problem_reads -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/1.problems + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_peak + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_peak -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_tss + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_tss -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' + +- name: ataqv ataqv test_ataqv_ataqv_excluded_regs + command: nextflow run ./tests/modules/ataqv/ataqv -entry test_ataqv_ataqv_excluded_regs -c ./tests/config/nextflow.config -c ./tests/modules/ataqv/ataqv/nextflow.config + tags: + - ataqv + - ataqv/ataqv + files: + - path: output/ataqv/test.ataqv.json + contains: + - '"forward_mate_reads": 101' diff --git a/tests/modules/bakta/main.nf b/tests/modules/bakta/main.nf new file mode 100644 index 00000000..1bc00622 --- /dev/null +++ b/tests/modules/bakta/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAKTA } from '../../../modules/bakta/main.nf' + +workflow test_bakta { + + input = [ + [ id:'test', single_end:false ], // meta map + 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + BAKTA ( input, [], [], [] ) +} diff --git a/tests/modules/bakta/nextflow.config b/tests/modules/bakta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bakta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bakta/test.yml b/tests/modules/bakta/test.yml new file mode 100644 index 00000000..dcfc32bc --- /dev/null +++ b/tests/modules/bakta/test.yml @@ -0,0 +1,25 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core modules create-test-yml bakta +- name: bakta + command: nextflow run ./tests/modules/bakta -entry test_bakta -c tests/config/nextflow.config -stub-run + tags: + - bakta + files: + - path: output/bakta/test.embl + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.faa + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.ffn + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.fna + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.gbff + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.gff3 + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.hypotheticals.tsv + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.hypotheticals.faa + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/bakta/test.tsv + md5sum: d41d8cd98f00b204e9800998ecf8427e diff --git a/tests/modules/bamaligncleaner/main.nf b/tests/modules/bamaligncleaner/main.nf index 94ee005f..c9d517ae 100644 --- a/tests/modules/bamaligncleaner/main.nf +++ b/tests/modules/bamaligncleaner/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMALIGNCLEANER } from '../../../modules/bamaligncleaner/main.nf' addParams( options: [:] ) +include { BAMALIGNCLEANER } from 
'../../../modules/bamaligncleaner/main.nf' workflow test_bamaligncleaner { diff --git a/tests/modules/bamaligncleaner/nextflow.config b/tests/modules/bamaligncleaner/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamaligncleaner/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamaligncleaner/test.yml b/tests/modules/bamaligncleaner/test.yml index 568925b0..4207b8c2 100644 --- a/tests/modules/bamaligncleaner/test.yml +++ b/tests/modules/bamaligncleaner/test.yml @@ -1,5 +1,5 @@ - name: bamaligncleaner - command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamaligncleaner -entry test_bamaligncleaner -c ./tests/config/nextflow.config -c ./tests/modules/bamaligncleaner/nextflow.config tags: - bamaligncleaner files: diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 5538c86f..eb0bed01 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' addParams( options: [args:"-reference"] ) +include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' workflow test_bamtools_split { diff --git a/tests/modules/bamtools/split/nextflow.config b/tests/modules/bamtools/split/nextflow.config new file mode 100644 index 00000000..e7de5477 --- /dev/null +++ b/tests/modules/bamtools/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BAMTOOLS_SPLIT { + ext.args = '-reference' + } + +} diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index 
f28a9bcf..4f52e9ce 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,5 +1,5 @@ - name: bamtools split test_bamtools_split - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config tags: - bamtools/split - bamtools diff --git a/tests/modules/bamutil/trimbam/main.nf b/tests/modules/bamutil/trimbam/main.nf new file mode 100644 index 00000000..2967b038 --- /dev/null +++ b/tests/modules/bamutil/trimbam/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAMUTIL_TRIMBAM } from '../../../../modules/bamutil/trimbam/main.nf' + +workflow test_bamutil_trimbam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + 2, + 2 ] + + BAMUTIL_TRIMBAM ( input ) +} diff --git a/tests/modules/bamutil/trimbam/nextflow.config b/tests/modules/bamutil/trimbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bamutil/trimbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bamutil/trimbam/test.yml b/tests/modules/bamutil/trimbam/test.yml new file mode 100644 index 00000000..443a4ded --- /dev/null +++ b/tests/modules/bamutil/trimbam/test.yml @@ -0,0 +1,8 @@ +- name: bamutil trimbam test_bamutil_trimbam + command: nextflow run ./tests/modules/bamutil/trimbam -entry test_bamutil_trimbam -c ./tests/config/nextflow.config -c ./tests/modules/bamutil/trimbam/nextflow.config + tags: + - bamutil/trimbam + - bamutil + files: + - path: output/bamutil/test.bam + md5sum: 9ddd0ecca82f7f3433383f3d1308970e diff --git 
a/tests/modules/bandage/image/main.nf b/tests/modules/bandage/image/main.nf index 524066b0..15f01ab1 100644 --- a/tests/modules/bandage/image/main.nf +++ b/tests/modules/bandage/image/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' addParams( options: [:] ) +include { BANDAGE_IMAGE } from '../../../../modules/bandage/image/main.nf' workflow test_bandage_image { input = [ diff --git a/tests/modules/bandage/image/nextflow.config b/tests/modules/bandage/image/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bandage/image/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bandage/image/test.yml b/tests/modules/bandage/image/test.yml index 437eca05..2abdd175 100644 --- a/tests/modules/bandage/image/test.yml +++ b/tests/modules/bandage/image/test.yml @@ -1,5 +1,5 @@ - name: bandage image - command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bandage/image -entry test_bandage_image -c ./tests/config/nextflow.config -c ./tests/modules/bandage/image/nextflow.config tags: - bandage - bandage/image diff --git a/tests/modules/bbmap/align/main.nf b/tests/modules/bbmap/align/main.nf index c3bf43ba..c7a02e2a 100644 --- a/tests/modules/bbmap/align/main.nf +++ b/tests/modules/bbmap/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' addParams( options: [:] ) -include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' addParams( options: [args: "unpigz=t" ] ) +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' +include { 
BBMAP_ALIGN } from '../../../../modules/bbmap/align/main.nf' +include { BBMAP_ALIGN as BBMAP_ALIGN_PIGZ } from '../../../../modules/bbmap/align/main.nf' workflow test_bbmap_align_paired_end_fasta_ref { diff --git a/tests/modules/bbmap/align/nextflow.config b/tests/modules/bbmap/align/nextflow.config new file mode 100644 index 00000000..fe0afd72 --- /dev/null +++ b/tests/modules/bbmap/align/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_ALIGN_PIGZ { + ext.args = 'unpigz=t' + } + +} diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index 0fcc8ce9..d9f9a862 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -1,35 +1,39 @@ - name: bbmap align paired end fasta ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_fasta_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align files: - path: output/bbmap/test.bam md5sum: e0ec7f1eec537acf146fac1cbdd868d1 + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align files: - path: output/bbmap/test.bam md5sum: 345a72a0d58366d75dd263b107caa460 + - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c tests/config/nextflow.config + command: nextflow 
run ./tests/modules/bbmap/align -entry test_bbmap_align_single_end_index_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align files: - path: output/bbmap/test.bam md5sum: 95f690636581ce9b27cf8568c715ae4d + - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz - command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/align -entry test_bbmap_align_paired_end_index_ref_pigz -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/align/nextflow.config tags: - bbmap - bbmap/align files: - path: output/bbmap/test.bam md5sum: 441c4f196b9a82c7b224903538064308 + - path: output/bbmap/test.bbmap.log diff --git a/tests/modules/bbmap/bbduk/main.nf b/tests/modules/bbmap/bbduk/main.nf index 911ca391..e1f0c2de 100644 --- a/tests/modules/bbmap/bbduk/main.nf +++ b/tests/modules/bbmap/bbduk/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' addParams( options: [ 'args' : 'trimq=10 qtrim=r', 'suffix' : '.trim' ] ) +include { BBMAP_BBDUK } from '../../../../modules/bbmap/bbduk/main.nf' workflow test_bbmap_bbduk_single_end { diff --git a/tests/modules/bbmap/bbduk/nextflow.config b/tests/modules/bbmap/bbduk/nextflow.config new file mode 100644 index 00000000..46fc33b4 --- /dev/null +++ b/tests/modules/bbmap/bbduk/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BBMAP_BBDUK { + ext.args = 'trimq=10 qtrim=r' + ext.prefix = { "${meta.id}.trim" } + } + +} diff --git a/tests/modules/bbmap/bbduk/test.yml b/tests/modules/bbmap/bbduk/test.yml index 4d2b8604..7ab5b963 100644 --- a/tests/modules/bbmap/bbduk/test.yml +++ b/tests/modules/bbmap/bbduk/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbduk 
test_bbmap_bbduk_single_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -10,7 +10,7 @@ md5sum: a87d0cbd5ced7df8bf1751e4cb407482 - name: bbmap bbduk test_bbmap_bbduk_paired_end - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -23,7 +23,7 @@ md5sum: 406e068fbe198f02b48e7e210cc0c69f - name: bbmap bbduk test_bbmap_bbduk_se_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_se_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: @@ -34,7 +34,7 @@ md5sum: 3970e82605c7d109bb348fc94e9eecc0 - name: bbmap bbduk test_bbmap_bbduk_pe_ref - command: nextflow run tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbduk -entry test_bbmap_bbduk_pe_ref -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbduk/nextflow.config tags: - bbmap/bbduk files: diff --git a/tests/modules/bbmap/bbsplit/main.nf b/tests/modules/bbmap/bbsplit/main.nf index 1d3c30c1..d1236061 100644 --- a/tests/modules/bbmap/bbsplit/main.nf +++ b/tests/modules/bbmap/bbsplit/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) -include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from 
'../../../../modules/bbmap/bbsplit/main.nf' addParams( options: [:] ) +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_INDEX } from '../../../../modules/bbmap/bbsplit/main.nf' +include { BBMAP_BBSPLIT as BBMAP_BBSPLIT_SPLIT } from '../../../../modules/bbmap/bbsplit/main.nf' workflow test_bbmap_bbsplit { diff --git a/tests/modules/bbmap/bbsplit/nextflow.config b/tests/modules/bbmap/bbsplit/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/bbsplit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bbmap/bbsplit/test.yml b/tests/modules/bbmap/bbsplit/test.yml index 87bdebea..add9b519 100644 --- a/tests/modules/bbmap/bbsplit/test.yml +++ b/tests/modules/bbmap/bbsplit/test.yml @@ -1,5 +1,5 @@ - name: bbmap bbsplit test_bbmap_bbsplit - command: nextflow run tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/bbsplit -entry test_bbmap_bbsplit -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/bbsplit/nextflow.config tags: - bbmap/bbsplit - bbmap diff --git a/tests/modules/bbmap/index/main.nf b/tests/modules/bbmap/index/main.nf index 0d912615..a6f111f4 100644 --- a/tests/modules/bbmap/index/main.nf +++ b/tests/modules/bbmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' addParams( options: [:] ) +include { BBMAP_INDEX } from '../../../../modules/bbmap/index/main.nf' workflow test_bbmap_index { diff --git a/tests/modules/bbmap/index/nextflow.config b/tests/modules/bbmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bbmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff 
--git a/tests/modules/bbmap/index/test.yml b/tests/modules/bbmap/index/test.yml index 32684ad4..4e8d7196 100644 --- a/tests/modules/bbmap/index/test.yml +++ b/tests/modules/bbmap/index/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml bbmap/index - name: bbmap index - command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bbmap/index -entry test_bbmap_index -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/index/nextflow.config tags: - bbmap - bbmap/index diff --git a/tests/modules/bcftools/concat/main.nf b/tests/modules/bcftools/concat/main.nf index 8869a3d7..8441d488 100644 --- a/tests/modules/bcftools/concat/main.nf +++ b/tests/modules/bcftools/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf' workflow test_bcftools_concat { diff --git a/tests/modules/bcftools/concat/nextflow.config b/tests/modules/bcftools/concat/nextflow.config new file mode 100644 index 00000000..3f0d064a --- /dev/null +++ b/tests/modules/bcftools/concat/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_CONCAT { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/concat/test.yml b/tests/modules/bcftools/concat/test.yml index 413fe798..fee6158f 100644 --- a/tests/modules/bcftools/concat/test.yml +++ b/tests/modules/bcftools/concat/test.yml @@ -1,5 +1,5 @@ - name: bcftools concat test_bcftools_concat - command: nextflow run tests/modules/bcftools/concat -entry test_bcftools_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/concat 
-entry test_bcftools_concat -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config tags: - bcftools/concat - bcftools diff --git a/tests/modules/bcftools/consensus/main.nf b/tests/modules/bcftools/consensus/main.nf index 13f7b39e..ab00fbce 100644 --- a/tests/modules/bcftools/consensus/main.nf +++ b/tests/modules/bcftools/consensus/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' addParams( options: [:] ) +include { BCFTOOLS_CONSENSUS } from '../../../../modules/bcftools/consensus/main.nf' workflow test_bcftools_consensus { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/consensus/nextflow.config b/tests/modules/bcftools/consensus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/consensus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/consensus/test.yml b/tests/modules/bcftools/consensus/test.yml index b3760fcd..7fa4ecae 100644 --- a/tests/modules/bcftools/consensus/test.yml +++ b/tests/modules/bcftools/consensus/test.yml @@ -1,5 +1,5 @@ - name: bcftools consensus test_bcftools_consensus - command: nextflow run tests/modules/bcftools/consensus -entry test_bcftools_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/consensus -entry test_bcftools_consensus -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/consensus/nextflow.config tags: - bcftools/consensus - bcftools diff --git a/tests/modules/bcftools/filter/main.nf b/tests/modules/bcftools/filter/main.nf index bd419e3a..85fbf950 100644 --- a/tests/modules/bcftools/filter/main.nf +++ b/tests/modules/bcftools/filter/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution 
-include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_FILTER } from '../../../../modules/bcftools/filter/main.nf' workflow test_bcftools_filter { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/filter/nextflow.config b/tests/modules/bcftools/filter/nextflow.config new file mode 100644 index 00000000..68cac7bb --- /dev/null +++ b/tests/modules/bcftools/filter/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_FILTER { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/filter/test.yml b/tests/modules/bcftools/filter/test.yml index 0f8e48eb..da842538 100644 --- a/tests/modules/bcftools/filter/test.yml +++ b/tests/modules/bcftools/filter/test.yml @@ -1,5 +1,5 @@ - name: bcftools filter test_bcftools_filter - command: nextflow run tests/modules/bcftools/filter -entry test_bcftools_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/filter -entry test_bcftools_filter -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/filter/nextflow.config tags: - bcftools/filter - bcftools diff --git a/tests/modules/bcftools/index/main.nf b/tests/modules/bcftools/index/main.nf index 73909d66..839cd988 100644 --- a/tests/modules/bcftools/index/main.nf +++ b/tests/modules/bcftools/index/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [:] ) -include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' addParams( options: [args: '-t'] ) +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_CSI } from '../../../../modules/bcftools/index/main.nf' +include { BCFTOOLS_INDEX as BCFTOOLS_INDEX_TBI } from '../../../../modules/bcftools/index/main.nf' 
workflow test_bcftools_index_csi { diff --git a/tests/modules/bcftools/index/nextflow.config b/tests/modules/bcftools/index/nextflow.config new file mode 100644 index 00000000..9a060ba2 --- /dev/null +++ b/tests/modules/bcftools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_INDEX_TBI { + ext.args = '-t' + } + +} diff --git a/tests/modules/bcftools/index/test.yml b/tests/modules/bcftools/index/test.yml index 36c5f3c0..f1a29437 100644 --- a/tests/modules/bcftools/index/test.yml +++ b/tests/modules/bcftools/index/test.yml @@ -1,5 +1,5 @@ - name: bcftools index - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index @@ -8,7 +8,7 @@ md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd - name: bcftools index tbi - command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/index -entry test_bcftools_index_tbi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/index/nextflow.config tags: - bcftools - bcftools/index diff --git a/tests/modules/bcftools/isec/main.nf b/tests/modules/bcftools/isec/main.nf index 1b0c2c07..0b8ffc5c 100644 --- a/tests/modules/bcftools/isec/main.nf +++ b/tests/modules/bcftools/isec/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' addParams( options: ['args': '--nfiles +2 --output-type z --no-version'] ) +include { BCFTOOLS_ISEC } from '../../../../modules/bcftools/isec/main.nf' workflow test_bcftools_isec { input = [ [ id:'test' ], // meta map diff --git 
a/tests/modules/bcftools/isec/nextflow.config b/tests/modules/bcftools/isec/nextflow.config new file mode 100644 index 00000000..770e4674 --- /dev/null +++ b/tests/modules/bcftools/isec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_ISEC { + ext.args = '--nfiles +2 --output-type z --no-version' + } + +} diff --git a/tests/modules/bcftools/isec/test.yml b/tests/modules/bcftools/isec/test.yml index 92186c89..fc887d9d 100644 --- a/tests/modules/bcftools/isec/test.yml +++ b/tests/modules/bcftools/isec/test.yml @@ -1,5 +1,5 @@ - name: bcftools isec test_bcftools_isec - command: nextflow run tests/modules/bcftools/isec -entry test_bcftools_isec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/isec -entry test_bcftools_isec -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/isec/nextflow.config tags: - bcftools - bcftools/isec diff --git a/tests/modules/bcftools/merge/main.nf b/tests/modules/bcftools/merge/main.nf index a672a9a7..119e237a 100644 --- a/tests/modules/bcftools/merge/main.nf +++ b/tests/modules/bcftools/merge/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 //keep --no-verson argument, otherwise md5 will change on each execution -include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' addParams( options: ['args': '--force-samples --no-version'] ) +include { BCFTOOLS_MERGE } from '../../../../modules/bcftools/merge/main.nf' workflow test_bcftools_merge { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/merge/nextflow.config b/tests/modules/bcftools/merge/nextflow.config new file mode 100644 index 00000000..e11e50b6 --- /dev/null +++ b/tests/modules/bcftools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_MERGE { + ext.args = 
'--force-samples --no-version' + } + +} diff --git a/tests/modules/bcftools/merge/test.yml b/tests/modules/bcftools/merge/test.yml index d3cdd74a..6c9dd556 100644 --- a/tests/modules/bcftools/merge/test.yml +++ b/tests/modules/bcftools/merge/test.yml @@ -1,5 +1,5 @@ - name: bcftools merge test_bcftools_merge - command: nextflow run tests/modules/bcftools/merge -entry test_bcftools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/merge -entry test_bcftools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/merge/nextflow.config tags: - bcftools/merge - bcftools diff --git a/tests/modules/bcftools/mpileup/main.nf b/tests/modules/bcftools/mpileup/main.nf index 2225c5e0..813ca408 100644 --- a/tests/modules/bcftools/mpileup/main.nf +++ b/tests/modules/bcftools/mpileup/main.nf @@ -2,8 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' addParams( options: ['args2': '--no-version --ploidy 1 --multiallelic-caller', - 'args3': '--no-version' ] ) +include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' workflow test_bcftools_mpileup { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/mpileup/nextflow.config b/tests/modules/bcftools/mpileup/nextflow.config new file mode 100644 index 00000000..c21fef8d --- /dev/null +++ b/tests/modules/bcftools/mpileup/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_MPILEUP { + ext.args2 = '--no-version --ploidy 1 --multiallelic-caller' + ext.args3 = '--no-version' + } + +} diff --git a/tests/modules/bcftools/mpileup/test.yml b/tests/modules/bcftools/mpileup/test.yml index 71877e29..f081c543 100644 --- a/tests/modules/bcftools/mpileup/test.yml +++ b/tests/modules/bcftools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: bcftools mpileup test_bcftools_mpileup - 
command: nextflow run tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/mpileup -entry test_bcftools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/mpileup/nextflow.config tags: - bcftools/mpileup - bcftools diff --git a/tests/modules/bcftools/norm/main.nf b/tests/modules/bcftools/norm/main.nf index 046c0b3c..ac056bea 100644 --- a/tests/modules/bcftools/norm/main.nf +++ b/tests/modules/bcftools/norm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' addParams( options: ['args': '-m -any --no-version'] ) +include { BCFTOOLS_NORM } from '../../../../modules/bcftools/norm/main.nf' workflow test_bcftools_norm { diff --git a/tests/modules/bcftools/norm/nextflow.config b/tests/modules/bcftools/norm/nextflow.config new file mode 100644 index 00000000..e4d27a73 --- /dev/null +++ b/tests/modules/bcftools/norm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_NORM { + ext.args = '-m -any --no-version' + } + +} diff --git a/tests/modules/bcftools/norm/test.yml b/tests/modules/bcftools/norm/test.yml index 40d0cc7e..bb4f9aca 100644 --- a/tests/modules/bcftools/norm/test.yml +++ b/tests/modules/bcftools/norm/test.yml @@ -1,5 +1,5 @@ - name: bcftools norm - command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/norm -entry test_bcftools_norm -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/norm/nextflow.config tags: - bcftools - bcftools/norm diff --git a/tests/modules/bcftools/query/main.nf b/tests/modules/bcftools/query/main.nf index a16ceddf..733cae17 100644 --- a/tests/modules/bcftools/query/main.nf +++ b/tests/modules/bcftools/query/main.nf @@ -2,7 +2,7 @@ 
nextflow.enable.dsl = 2 -include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' addParams( options: ['args': "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" ] ) +include { BCFTOOLS_QUERY } from '../../../../modules/bcftools/query/main.nf' workflow test_bcftools_query { diff --git a/tests/modules/bcftools/query/nextflow.config b/tests/modules/bcftools/query/nextflow.config new file mode 100644 index 00000000..e4105006 --- /dev/null +++ b/tests/modules/bcftools/query/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_QUERY { + ext.args = "-f '%CHROM %POS %REF %ALT[%SAMPLE=%GT]'" + } + +} diff --git a/tests/modules/bcftools/query/test.yml b/tests/modules/bcftools/query/test.yml index fbfda92b..aaa9af7b 100644 --- a/tests/modules/bcftools/query/test.yml +++ b/tests/modules/bcftools/query/test.yml @@ -1,5 +1,5 @@ - name: bcftools query - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query @@ -8,7 +8,7 @@ md5sum: c32a6d28f185822d8fe1eeb7e42ec155 - name: bcftools query with optional files - command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/query -entry test_bcftools_query_with_optional_files -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/query/nextflow.config tags: - bcftools - bcftools/query diff --git a/tests/modules/bcftools/reheader/main.nf b/tests/modules/bcftools/reheader/main.nf index 40863331..d1dcd8b8 100644 --- a/tests/modules/bcftools/reheader/main.nf +++ b/tests/modules/bcftools/reheader/main.nf @@ -2,7 +2,7 @@ 
nextflow.enable.dsl = 2 -include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' addParams( options: [suffix: '.updated'] ) +include { BCFTOOLS_REHEADER } from '../../../../modules/bcftools/reheader/main.nf' workflow test_bcftools_reheader_update_sequences { diff --git a/tests/modules/bcftools/reheader/nextflow.config b/tests/modules/bcftools/reheader/nextflow.config new file mode 100644 index 00000000..55d2cff8 --- /dev/null +++ b/tests/modules/bcftools/reheader/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_REHEADER { + ext.prefix = { "${meta.id}.updated" } + } + +} diff --git a/tests/modules/bcftools/reheader/test.yml b/tests/modules/bcftools/reheader/test.yml index 78337206..1ce0b104 100644 --- a/tests/modules/bcftools/reheader/test.yml +++ b/tests/modules/bcftools/reheader/test.yml @@ -1,5 +1,5 @@ - name: bcftools reheader test_bcftools_reheader_update_sequences - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -8,7 +8,7 @@ md5sum: 9e29f28038bfce77ee00022627209ed6 - name: bcftools reheader test_bcftools_reheader_new_header - command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools @@ -17,7 +17,7 @@ md5sum: f7f536d889bbf5be40243252c394ee1f - name: bcftools reheader test_bcftools_reheader_new_header_update_sequences 
- command: nextflow run tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/reheader -entry test_bcftools_reheader_new_header_update_sequences -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/reheader/nextflow.config tags: - bcftools/reheader - bcftools diff --git a/tests/modules/bcftools/stats/main.nf b/tests/modules/bcftools/stats/main.nf index 4039c080..808a3330 100644 --- a/tests/modules/bcftools/stats/main.nf +++ b/tests/modules/bcftools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' addParams( options: [:] ) +include { BCFTOOLS_STATS } from '../../../../modules/bcftools/stats/main.nf' workflow test_bcftools_stats { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/bcftools/stats/nextflow.config b/tests/modules/bcftools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bcftools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bcftools/stats/test.yml b/tests/modules/bcftools/stats/test.yml index cd25fe66..d3587f95 100644 --- a/tests/modules/bcftools/stats/test.yml +++ b/tests/modules/bcftools/stats/test.yml @@ -1,5 +1,5 @@ - name: bcftools stats test_bcftools_stats - command: nextflow run tests/modules/bcftools/stats -entry test_bcftools_stats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/stats -entry test_bcftools_stats -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/stats/nextflow.config tags: - bcftools - bcftools/stats diff --git a/tests/modules/bcftools/view/main.nf b/tests/modules/bcftools/view/main.nf index a8ac3b31..f45d0284 100644 --- a/tests/modules/bcftools/view/main.nf +++ 
b/tests/modules/bcftools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' addParams( options: ['args': '--no-version'] ) +include { BCFTOOLS_VIEW } from '../../../../modules/bcftools/view/main.nf' workflow test_bcftools_view { diff --git a/tests/modules/bcftools/view/nextflow.config b/tests/modules/bcftools/view/nextflow.config new file mode 100644 index 00000000..e1723b89 --- /dev/null +++ b/tests/modules/bcftools/view/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BCFTOOLS_VIEW { + ext.args = '--no-version' + } + +} diff --git a/tests/modules/bcftools/view/test.yml b/tests/modules/bcftools/view/test.yml index 179e9a1c..fa926dd6 100644 --- a/tests/modules/bcftools/view/test.yml +++ b/tests/modules/bcftools/view/test.yml @@ -1,5 +1,5 @@ - name: bcftools view - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view @@ -8,7 +8,7 @@ md5sum: fc178eb342a91dc0d1d568601ad8f8e2 - name: bcftools view with optional files - command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bcftools/view -entry test_bcftools_view_with_optional_files -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/view/nextflow.config tags: - bcftools - bcftools/view diff --git a/tests/modules/bedtools/bamtobed/main.nf b/tests/modules/bedtools/bamtobed/main.nf index 41cf460a..e7635a3d 100644 --- a/tests/modules/bedtools/bamtobed/main.nf +++ b/tests/modules/bedtools/bamtobed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' addParams( options: [:] ) +include { BEDTOOLS_BAMTOBED } from '../../../../modules/bedtools/bamtobed/main.nf' workflow test_bedtools_bamtobed { input = [ [ id:'test'], //meta map diff --git a/tests/modules/bedtools/bamtobed/nextflow.config b/tests/modules/bedtools/bamtobed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/bamtobed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/bamtobed/test.yml b/tests/modules/bedtools/bamtobed/test.yml index 106d125d..b038467c 100644 --- a/tests/modules/bedtools/bamtobed/test.yml +++ b/tests/modules/bedtools/bamtobed/test.yml @@ -1,5 +1,5 @@ - name: bedtools bamtobed - command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/bamtobed -entry test_bedtools_bamtobed -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/bamtobed/nextflow.config tags: - bedtools - bedtools/bamtobed diff --git a/tests/modules/bedtools/complement/main.nf b/tests/modules/bedtools/complement/main.nf index 6456fe60..a1cca033 100644 --- a/tests/modules/bedtools/complement/main.nf +++ b/tests/modules/bedtools/complement/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_COMPLEMENT } from '../../../../modules/bedtools/complement/main.nf' workflow test_bedtools_complement { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/complement/nextflow.config b/tests/modules/bedtools/complement/nextflow.config new file mode 100644 index 00000000..cb867120 --- /dev/null +++ b/tests/modules/bedtools/complement/nextflow.config @@ -0,0 +1,9 @@ 
+process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_COMPLEMENT { + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/complement/test.yml b/tests/modules/bedtools/complement/test.yml index 2ebc6419..9dbeb36f 100644 --- a/tests/modules/bedtools/complement/test.yml +++ b/tests/modules/bedtools/complement/test.yml @@ -1,5 +1,5 @@ - name: bedtools complement - command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/complement -entry test_bedtools_complement -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/complement/nextflow.config tags: - bedtools - bedtools/complement diff --git a/tests/modules/bedtools/genomecov/main.nf b/tests/modules/bedtools/genomecov/main.nf index 445ed078..b507a2cd 100644 --- a/tests/modules/bedtools/genomecov/main.nf +++ b/tests/modules/bedtools/genomecov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' workflow test_bedtools_genomecov_noscale { input = [ diff --git a/tests/modules/bedtools/genomecov/nextflow.config b/tests/modules/bedtools/genomecov/nextflow.config new file mode 100644 index 00000000..6e1c03e2 --- /dev/null +++ b/tests/modules/bedtools/genomecov/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_GENOMECOV { + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/genomecov/test.yml b/tests/modules/bedtools/genomecov/test.yml index 477e6555..8f63bde9 100644 --- a/tests/modules/bedtools/genomecov/test.yml +++ 
b/tests/modules/bedtools/genomecov/test.yml @@ -1,5 +1,5 @@ - name: bedtools genomecov test_bedtools_genomecov_noscale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -8,7 +8,7 @@ md5sum: 66083198daca6c001d328ba9616e9b53 - name: bedtools genomecov test_bedtools_genomecov_nonbam_noscale - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -17,7 +17,7 @@ md5sum: f47b58840087426e5b643d8dfd155c1f - name: bedtools genomecov test_bedtools_genomecov_scale - command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov @@ -26,7 +26,7 @@ md5sum: 01291b6e1beab72e046653e709eb0e10 - name: bedtools genomecov test_bedtools_genomecov_nonbam_scale - command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/genomecov/nextflow.config tags: - bedtools - bedtools/genomecov diff --git a/tests/modules/bedtools/getfasta/main.nf 
b/tests/modules/bedtools/getfasta/main.nf index 194597ae..425c49d5 100644 --- a/tests/modules/bedtools/getfasta/main.nf +++ b/tests/modules/bedtools/getfasta/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_GETFASTA } from '../../../../modules/bedtools/getfasta/main.nf' workflow test_bedtools_getfasta { bed = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) diff --git a/tests/modules/bedtools/getfasta/nextflow.config b/tests/modules/bedtools/getfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/getfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/getfasta/test.yml b/tests/modules/bedtools/getfasta/test.yml index adf10da5..a455f861 100644 --- a/tests/modules/bedtools/getfasta/test.yml +++ b/tests/modules/bedtools/getfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools getfasta - command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/getfasta -entry test_bedtools_getfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/getfasta/nextflow.config tags: - bedtools - bedtools/getfasta diff --git a/tests/modules/bedtools/intersect/main.nf b/tests/modules/bedtools/intersect/main.nf index 73a9b30c..c17d03e6 100644 --- a/tests/modules/bedtools/intersect/main.nf +++ b/tests/modules/bedtools/intersect/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_INTERSECT } from '../../../../modules/bedtools/intersect/main.nf' workflow test_bedtools_intersect { input = [ 
diff --git a/tests/modules/bedtools/intersect/nextflow.config b/tests/modules/bedtools/intersect/nextflow.config new file mode 100644 index 00000000..3aa2593f --- /dev/null +++ b/tests/modules/bedtools/intersect/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_INTERSECT { + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/intersect/test.yml b/tests/modules/bedtools/intersect/test.yml index c8c3ad6e..86fe70cd 100644 --- a/tests/modules/bedtools/intersect/test.yml +++ b/tests/modules/bedtools/intersect/test.yml @@ -1,5 +1,5 @@ - name: bedtools intersect test_bedtools_intersect - command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect @@ -8,7 +8,7 @@ md5sum: afcbf01c2f2013aad71dbe8e34f2c15c - name: bedtools intersect test_bedtools_intersect_bam - command: nextflow run tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/intersect -entry test_bedtools_intersect_bam -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/intersect/nextflow.config tags: - bedtools - bedtools/intersect diff --git a/tests/modules/bedtools/makewindows/main.nf b/tests/modules/bedtools/makewindows/main.nf index 23c40a75..ce37de72 100644 --- a/tests/modules/bedtools/makewindows/main.nf +++ b/tests/modules/bedtools/makewindows/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -test_options = ['args': '-w 50 '] -include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' addParams( options: test_options ) +include { BEDTOOLS_MAKEWINDOWS } from 
'../../../../modules/bedtools/makewindows/main.nf' workflow test_bedtools_makewindows { - - input = [ [ id:'test'], - file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + input = [ + [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] BEDTOOLS_MAKEWINDOWS ( input, true ) } diff --git a/tests/modules/bedtools/makewindows/nextflow.config b/tests/modules/bedtools/makewindows/nextflow.config new file mode 100644 index 00000000..e8b8c3ea --- /dev/null +++ b/tests/modules/bedtools/makewindows/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MAKEWINDOWS { + ext.args = '-w 50 ' + } + +} diff --git a/tests/modules/bedtools/makewindows/test.yml b/tests/modules/bedtools/makewindows/test.yml index c39d1c08..8accaa36 100644 --- a/tests/modules/bedtools/makewindows/test.yml +++ b/tests/modules/bedtools/makewindows/test.yml @@ -1,5 +1,5 @@ - name: bedtools makewindows test_bedtools_makewindows - command: nextflow run tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/makewindows -entry test_bedtools_makewindows -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/makewindows/nextflow.config tags: - bedtools/makewindows - bedtools diff --git a/tests/modules/bedtools/maskfasta/main.nf b/tests/modules/bedtools/maskfasta/main.nf index 8c30fbdc..0da02ad3 100644 --- a/tests/modules/bedtools/maskfasta/main.nf +++ b/tests/modules/bedtools/maskfasta/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' addParams( options: [:] ) +include { BEDTOOLS_MASKFASTA } from '../../../../modules/bedtools/maskfasta/main.nf' workflow test_bedtools_maskfasta { bed = [ [ id:'test'], diff --git 
a/tests/modules/bedtools/maskfasta/nextflow.config b/tests/modules/bedtools/maskfasta/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/maskfasta/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/maskfasta/test.yml b/tests/modules/bedtools/maskfasta/test.yml index f536a6eb..f1e8f35a 100644 --- a/tests/modules/bedtools/maskfasta/test.yml +++ b/tests/modules/bedtools/maskfasta/test.yml @@ -1,5 +1,5 @@ - name: bedtools maskfasta - command: nextflow run ./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/maskfasta -entry test_bedtools_maskfasta -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/maskfasta/nextflow.config tags: - bedtools - bedtools/maskfasta diff --git a/tests/modules/bedtools/merge/main.nf b/tests/modules/bedtools/merge/main.nf index f11b804a..5fca0526 100644 --- a/tests/modules/bedtools/merge/main.nf +++ b/tests/modules/bedtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_MERGE } from '../../../../modules/bedtools/merge/main.nf' workflow test_bedtools_merge { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/merge/nextflow.config b/tests/modules/bedtools/merge/nextflow.config new file mode 100644 index 00000000..545a523d --- /dev/null +++ b/tests/modules/bedtools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MERGE { + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/merge/test.yml b/tests/modules/bedtools/merge/test.yml index 62bc6860..5fc8b034 
100644 --- a/tests/modules/bedtools/merge/test.yml +++ b/tests/modules/bedtools/merge/test.yml @@ -1,5 +1,5 @@ - name: bedtools merge - command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/merge -entry test_bedtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/merge/nextflow.config tags: - bedtools - bedtools/merge diff --git a/tests/modules/bedtools/slop/main.nf b/tests/modules/bedtools/slop/main.nf index 47c19781..e7136fdc 100644 --- a/tests/modules/bedtools/slop/main.nf +++ b/tests/modules/bedtools/slop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' addParams( options: [args: '-l 15 -r 30', suffix: '_out'] ) +include { BEDTOOLS_SLOP } from '../../../../modules/bedtools/slop/main.nf' workflow test_bedtools_slop { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/slop/nextflow.config b/tests/modules/bedtools/slop/nextflow.config new file mode 100644 index 00000000..09abb51a --- /dev/null +++ b/tests/modules/bedtools/slop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SLOP { + ext.args = '-l 15 -r 30' + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/slop/test.yml b/tests/modules/bedtools/slop/test.yml index 859b569e..0d49e66b 100644 --- a/tests/modules/bedtools/slop/test.yml +++ b/tests/modules/bedtools/slop/test.yml @@ -1,5 +1,5 @@ - name: bedtools slop - command: nextflow run ./tests/modules/bedtools/slop -entry test_bedtools_slop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/slop -entry test_bedtools_slop -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/slop/nextflow.config tags: - bedtools - bedtools/slop diff --git 
a/tests/modules/bedtools/sort/main.nf b/tests/modules/bedtools/sort/main.nf index b5d34e2f..342b4116 100644 --- a/tests/modules/bedtools/sort/main.nf +++ b/tests/modules/bedtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SORT } from '../../../../modules/bedtools/sort/main.nf' addParams( options: [suffix: '_out'] ) +include { BEDTOOLS_SORT } from '../../../../modules/bedtools/sort/main.nf' workflow test_bedtools_sort { input = [ [ id:'test'], diff --git a/tests/modules/bedtools/sort/nextflow.config b/tests/modules/bedtools/sort/nextflow.config new file mode 100644 index 00000000..2ecc295a --- /dev/null +++ b/tests/modules/bedtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_SORT { + ext.prefix = { "${meta.id}_out" } + } + +} diff --git a/tests/modules/bedtools/sort/test.yml b/tests/modules/bedtools/sort/test.yml index 1dd04507..173f0587 100644 --- a/tests/modules/bedtools/sort/test.yml +++ b/tests/modules/bedtools/sort/test.yml @@ -1,5 +1,5 @@ - name: bedtools sort - command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/sort -entry test_bedtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/sort/nextflow.config tags: - bedtools - bedtools/sort diff --git a/tests/modules/bedtools/subtract/main.nf b/tests/modules/bedtools/subtract/main.nf index 9997f08c..2a0e6eab 100644 --- a/tests/modules/bedtools/subtract/main.nf +++ b/tests/modules/bedtools/subtract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BEDTOOLS_SUBTRACT } from '../../../../modules/bedtools/subtract/main.nf' addParams( options: [:] ) +include { BEDTOOLS_SUBTRACT } from '../../../../modules/bedtools/subtract/main.nf' workflow test_bedtools_subtract { input = [ diff --git 
a/tests/modules/bedtools/subtract/nextflow.config b/tests/modules/bedtools/subtract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bedtools/subtract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bedtools/subtract/test.yml b/tests/modules/bedtools/subtract/test.yml index fd8660fc..52b57436 100644 --- a/tests/modules/bedtools/subtract/test.yml +++ b/tests/modules/bedtools/subtract/test.yml @@ -1,5 +1,5 @@ - name: bedtools subtract - command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bedtools/subtract -entry test_bedtools_subtract -c ./tests/config/nextflow.config -c ./tests/modules/bedtools/subtract/nextflow.config tags: - bedtools - bedtools/subtract diff --git a/tests/modules/bismark/align/main.nf b/tests/modules/bismark/align/main.nf index 1f1fcdce..fe6d616a 100644 --- a/tests/modules/bismark/align/main.nf +++ b/tests/modules/bismark/align/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_single_end' ] ) -include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' addParams( options: [ publish_dir:'test_paired_end' ] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_SE } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_ALIGN as BISMARK_ALIGN_PE } from '../../../../modules/bismark/align/main.nf' // // Test with single-end data diff --git 
a/tests/modules/bismark/align/nextflow.config b/tests/modules/bismark/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/align/test.yml b/tests/modules/bismark/align/test.yml index 42dc44b3..ffae05af 100644 --- a/tests/modules/bismark/align/test.yml +++ b/tests/modules/bismark/align/test.yml @@ -1,19 +1,19 @@ - name: bismark align single-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_single_end/test.methylated_1_bismark_bt2.bam + - path: output/bismark/test.methylated_1_bismark_bt2.bam md5sum: dca4ba9ff705b70446f812e59bdb1a32 - - path: output/test_single_end/test.methylated_1_bismark_bt2_SE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_SE_report.txt - name: bismark align paired-end test workflow - command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/align -entry test_bismark_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bismark/align/nextflow.config tags: - bismark - bismark/align files: - - path: output/test_paired_end/test.methylated_1_bismark_bt2_pe.bam + - path: output/bismark/test.methylated_1_bismark_bt2_pe.bam md5sum: 43943b1f30d056fcbd9ed26061ea0583 - - path: output/test_paired_end/test.methylated_1_bismark_bt2_PE_report.txt + - path: output/bismark/test.methylated_1_bismark_bt2_PE_report.txt diff --git 
a/tests/modules/bismark/deduplicate/main.nf b/tests/modules/bismark/deduplicate/main.nf index fc44745c..ad97d66a 100644 --- a/tests/modules/bismark/deduplicate/main.nf +++ b/tests/modules/bismark/deduplicate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' workflow test_bismark_deduplicate { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/deduplicate/nextflow.config b/tests/modules/bismark/deduplicate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/deduplicate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/deduplicate/test.yml b/tests/modules/bismark/deduplicate/test.yml index 604c1023..12099750 100644 --- a/tests/modules/bismark/deduplicate/test.yml +++ b/tests/modules/bismark/deduplicate/test.yml @@ -1,5 +1,5 @@ - name: bismark deduplicate test workflow - command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/deduplicate -entry test_bismark_deduplicate -c ./tests/config/nextflow.config -c ./tests/modules/bismark/deduplicate/nextflow.config tags: - bismark - bismark/deduplicate diff --git a/tests/modules/bismark/genomepreparation/main.nf b/tests/modules/bismark/genomepreparation/main.nf index ab847171..a9111af3 100644 --- a/tests/modules/bismark/genomepreparation/main.nf +++ b/tests/modules/bismark/genomepreparation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) +include { 
BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' workflow test_bismark_genomepreparation { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bismark/genomepreparation/nextflow.config b/tests/modules/bismark/genomepreparation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/genomepreparation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/genomepreparation/test.yml b/tests/modules/bismark/genomepreparation/test.yml index 15a7e7d6..a0d3c072 100644 --- a/tests/modules/bismark/genomepreparation/test.yml +++ b/tests/modules/bismark/genomepreparation/test.yml @@ -1,5 +1,5 @@ - name: bismark genomepreparation test workflow - command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/genomepreparation -entry test_bismark_genomepreparation -c ./tests/config/nextflow.config -c ./tests/modules/bismark/genomepreparation/nextflow.config tags: - bismark - bismark/genomepreparation diff --git a/tests/modules/bismark/methylationextractor/main.nf b/tests/modules/bismark/methylationextractor/main.nf index 0b3f77a1..ed857fe8 100644 --- a/tests/modules/bismark/methylationextractor/main.nf +++ b/tests/modules/bismark/methylationextractor/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { 
BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' workflow test_bismark_methylationextractor { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/methylationextractor/nextflow.config b/tests/modules/bismark/methylationextractor/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/methylationextractor/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/methylationextractor/test.yml b/tests/modules/bismark/methylationextractor/test.yml index 4505c428..f25b7646 100644 --- a/tests/modules/bismark/methylationextractor/test.yml +++ b/tests/modules/bismark/methylationextractor/test.yml @@ -1,5 +1,5 @@ - name: bismark methylation extractor test workflow - command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/methylationextractor -entry test_bismark_methylationextractor -c ./tests/config/nextflow.config -c ./tests/modules/bismark/methylationextractor/nextflow.config tags: - bismark - bismark/methylationextractor diff --git a/tests/modules/bismark/report/main.nf b/tests/modules/bismark/report/main.nf index 945d24ed..f80fb2bc 100644 --- a/tests/modules/bismark/report/main.nf +++ b/tests/modules/bismark/report/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from 
'../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_REPORT } from '../../../../modules/bismark/report/main.nf' workflow test_bismark_report { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/report/nextflow.config b/tests/modules/bismark/report/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/report/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/report/test.yml b/tests/modules/bismark/report/test.yml index 7e85e4dd..9195994c 100644 --- a/tests/modules/bismark/report/test.yml +++ b/tests/modules/bismark/report/test.yml @@ -1,5 +1,5 @@ - name: bismark report test workflow - command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/report -entry test_bismark_report -c ./tests/config/nextflow.config -c ./tests/modules/bismark/report/nextflow.config tags: - bismark - bismark/report diff --git a/tests/modules/bismark/summary/main.nf b/tests/modules/bismark/summary/main.nf index 4170d19a..8eabe51f 100644 --- a/tests/modules/bismark/summary/main.nf +++ b/tests/modules/bismark/summary/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { BISMARK_GENOMEPREPARATION } from 
'../../../../modules/bismark/genomepreparation/main.nf' addParams( options: [:] ) -include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' addParams( options: [:] ) -include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' addParams( options: [:] ) -include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' addParams( options: [:] ) -include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' addParams( options: [:] ) +include { BISMARK_GENOMEPREPARATION } from '../../../../modules/bismark/genomepreparation/main.nf' +include { BISMARK_ALIGN } from '../../../../modules/bismark/align/main.nf' +include { BISMARK_DEDUPLICATE } from '../../../../modules/bismark/deduplicate/main.nf' +include { BISMARK_METHYLATIONEXTRACTOR } from '../../../../modules/bismark/methylationextractor/main.nf' +include { BISMARK_SUMMARY } from '../../../../modules/bismark/summary/main.nf' workflow test_bismark_summary { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/bismark/summary/nextflow.config b/tests/modules/bismark/summary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bismark/summary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bismark/summary/test.yml b/tests/modules/bismark/summary/test.yml index 06478873..3b5196b9 100644 --- a/tests/modules/bismark/summary/test.yml +++ b/tests/modules/bismark/summary/test.yml @@ -1,5 +1,5 @@ - name: bismark summary test workflow - command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bismark/summary -entry test_bismark_summary -c ./tests/config/nextflow.config -c ./tests/modules/bismark/summary/nextflow.config tags: - bismark 
- bismark/summary diff --git a/tests/modules/blast/blastn/main.nf b/tests/modules/blast/blastn/main.nf index fd690dcc..3c8496dc 100644 --- a/tests/modules/blast/blastn/main.nf +++ b/tests/modules/blast/blastn/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) -include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' addParams( options: [:] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' +include { BLAST_BLASTN } from '../../../../modules/blast/blastn/main.nf' workflow test_blast_blastn { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] diff --git a/tests/modules/blast/blastn/nextflow.config b/tests/modules/blast/blastn/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/blastn/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/blastn/test.yml b/tests/modules/blast/blastn/test.yml index 98f76921..17522c9b 100644 --- a/tests/modules/blast/blastn/test.yml +++ b/tests/modules/blast/blastn/test.yml @@ -1,5 +1,5 @@ - name: blast_blastn - command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/blastn -entry test_blast_blastn -c ./tests/config/nextflow.config -c ./tests/modules/blast/blastn/nextflow.config tags: - blast - blast/blastn diff --git a/tests/modules/blast/makeblastdb/main.nf b/tests/modules/blast/makeblastdb/main.nf index 48b39f22..9d778457 100644 --- a/tests/modules/blast/makeblastdb/main.nf +++ b/tests/modules/blast/makeblastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { 
BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' addParams( options: ['args': '-dbtype nucl'] ) +include { BLAST_MAKEBLASTDB } from '../../../../modules/blast/makeblastdb/main.nf' workflow test_blast_makeblastdb { input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] diff --git a/tests/modules/blast/makeblastdb/nextflow.config b/tests/modules/blast/makeblastdb/nextflow.config new file mode 100644 index 00000000..1d5a2c01 --- /dev/null +++ b/tests/modules/blast/makeblastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BLAST_MAKEBLASTDB { + ext.args = '-dbtype nucl' + } + +} diff --git a/tests/modules/blast/makeblastdb/test.yml b/tests/modules/blast/makeblastdb/test.yml index 7df17968..3b59f3f6 100644 --- a/tests/modules/blast/makeblastdb/test.yml +++ b/tests/modules/blast/makeblastdb/test.yml @@ -1,5 +1,5 @@ - name: blast_makeblastdb - command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/blast/makeblastdb -entry test_blast_makeblastdb -c ./tests/config/nextflow.config -c ./tests/modules/blast/makeblastdb/nextflow.config tags: - blast - blast/makeblastdb diff --git a/tests/modules/bowtie/align/main.nf b/tests/modules/bowtie/align/main.nf index b2c8059a..e773cd38 100644 --- a/tests/modules/bowtie/align/main.nf +++ b/tests/modules/bowtie/align/main.nf @@ -2,13 +2,16 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [:] ) -include { BOWTIE_ALIGN } from '../../../../modules/bowtie/align/main.nf' addParams( options: [:] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' +include { BOWTIE_ALIGN } from '../../../../modules/bowtie/align/main.nf' workflow test_bowtie_align_single_end { - input = [ [ 
id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) @@ -16,10 +19,13 @@ workflow test_bowtie_align_single_end { } workflow test_bowtie_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE_BUILD ( fasta ) diff --git a/tests/modules/bowtie/align/nextflow.config b/tests/modules/bowtie/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/align/test.yml b/tests/modules/bowtie/align/test.yml index 76d63d68..1f8d1294 100644 --- a/tests/modules/bowtie/align/test.yml +++ b/tests/modules/bowtie/align/test.yml @@ -1,5 +1,5 @@ - name: bowtie align single-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c ./tests/config/nextflow.config -c 
./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align @@ -7,36 +7,36 @@ - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out md5sum: 4b9140ceadb8a18ae9330885370f8a0b - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 - name: bowtie align paired-end - command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/align -entry test_bowtie_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/align/nextflow.config tags: - bowtie - bowtie/align files: - path: ./output/bowtie/test.bam - path: ./output/bowtie/test.out - - path: ./output/index/bowtie/genome.3.ebwt + - path: ./output/bowtie/bowtie/genome.3.ebwt md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie/genome.2.ebwt + - path: ./output/bowtie/bowtie/genome.2.ebwt md5sum: 02b44af9f94c62ecd3c583048e25d4cf - - path: ./output/index/bowtie/genome.rev.2.ebwt + - path: ./output/bowtie/bowtie/genome.rev.2.ebwt md5sum: 9e6b0c4c1ddb99ae71ff8a4fe5ec6459 - - path: ./output/index/bowtie/genome.4.ebwt + - path: ./output/bowtie/bowtie/genome.4.ebwt md5sum: c25be5f8b0378abf7a58c8a880b87626 - -
path: ./output/index/bowtie/genome.rev.1.ebwt + - path: ./output/bowtie/bowtie/genome.rev.1.ebwt md5sum: b37aaf11853e65a3b13561f27a912b06 - - path: ./output/index/bowtie/genome.1.ebwt + - path: ./output/bowtie/bowtie/genome.1.ebwt md5sum: d9b76ecf9fd0413240173273b38d8199 diff --git a/tests/modules/bowtie/build_test/main.nf b/tests/modules/bowtie/build_test/main.nf index a89091a8..7a36fb55 100644 --- a/tests/modules/bowtie/build_test/main.nf +++ b/tests/modules/bowtie/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' addParams( options: [publish_dir:'bowtie'] ) +include { BOWTIE_BUILD } from '../../../../modules/bowtie/build/main.nf' workflow test_bowtie_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie/build_test/nextflow.config b/tests/modules/bowtie/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie/build_test/test.yml b/tests/modules/bowtie/build_test/test.yml index c6b765c9..c51d1e8a 100644 --- a/tests/modules/bowtie/build_test/test.yml +++ b/tests/modules/bowtie/build_test/test.yml @@ -1,5 +1,5 @@ - name: bowtie build - command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie/build_test -entry test_bowtie_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie/build_test/nextflow.config tags: - bowtie - bowtie/build diff --git a/tests/modules/bowtie2/align/main.nf b/tests/modules/bowtie2/align/main.nf index 20602f30..8c8e3ab8 100644 --- a/tests/modules/bowtie2/align/main.nf +++ b/tests/modules/bowtie2/align/main.nf @@ -2,13 +2,16 @@
nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [:] ) -include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' addParams( options: [:] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' +include { BOWTIE2_ALIGN } from '../../../../modules/bowtie2/align/main.nf' workflow test_bowtie2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BOWTIE2_BUILD ( fasta ) @@ -16,11 +19,15 @@ workflow test_bowtie2_align_single_end { } workflow test_bowtie2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + BOWTIE2_BUILD ( fasta ) BOWTIE2_ALIGN ( input, BOWTIE2_BUILD.out.index ) } diff --git a/tests/modules/bowtie2/align/nextflow.config b/tests/modules/bowtie2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/bowtie2/align/test.yml b/tests/modules/bowtie2/align/test.yml index 05952b76..95d48b88 100644 --- a/tests/modules/bowtie2/align/test.yml +++ b/tests/modules/bowtie2/align/test.yml @@ -1,41 +1,41 @@ - name: bowtie2 align single-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: ./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 - name: bowtie2 align paired-end - command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/align -entry test_bowtie2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/align/nextflow.config tags: - bowtie2 - bowtie2/align files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log - - path: ./output/index/bowtie2/genome.3.bt2 + - path: 
./output/bowtie2/bowtie2/genome.3.bt2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: ./output/index/bowtie2/genome.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.2.bt2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: ./output/index/bowtie2/genome.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.1.bt2 md5sum: cbe3d0bbea55bc57c99b4bfa25b5fbdf - - path: ./output/index/bowtie2/genome.4.bt2 + - path: ./output/bowtie2/bowtie2/genome.4.bt2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - - path: ./output/index/bowtie2/genome.rev.1.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.1.bt2 md5sum: 52be6950579598a990570fbcf5372184 - - path: ./output/index/bowtie2/genome.rev.2.bt2 + - path: ./output/bowtie2/bowtie2/genome.rev.2.bt2 md5sum: e3b4ef343dea4dd571642010a7d09597 diff --git a/tests/modules/bowtie2/build_test/main.nf b/tests/modules/bowtie2/build_test/main.nf index 2b41fab2..f1d35083 100644 --- a/tests/modules/bowtie2/build_test/main.nf +++ b/tests/modules/bowtie2/build_test/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' addParams( options: [publish_dir:'bowtie2'] ) +include { BOWTIE2_BUILD } from '../../../../modules/bowtie2/build/main.nf' workflow test_bowtie2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bowtie2/build_test/nextflow.config b/tests/modules/bowtie2/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bowtie2/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bowtie2/build_test/test.yml b/tests/modules/bowtie2/build_test/test.yml index 3fd049b9..88e6c3ad 100644 --- a/tests/modules/bowtie2/build_test/test.yml +++ b/tests/modules/bowtie2/build_test/test.yml @@ -1,5 +1,5 @@ - name: bowtie2 build - command: 
nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bowtie2/build_test -entry test_bowtie2_build -c ./tests/config/nextflow.config -c ./tests/modules/bowtie2/build_test/nextflow.config tags: - bowtie2 - bowtie2/build diff --git a/tests/modules/bwa/aln/main.nf b/tests/modules/bwa/aln/main.nf index feb7473d..909e7a2d 100644 --- a/tests/modules/bwa/aln/main.nf +++ b/tests/modules/bwa/aln/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' // // Test with single-end data // workflow test_bwa_aln_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwa_aln_single_end { } workflow test_bwa_aln_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta =
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) diff --git a/tests/modules/bwa/aln/nextflow.config b/tests/modules/bwa/aln/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/aln/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/aln/test.yml b/tests/modules/bwa/aln/test.yml index 08848143..c89c47be 100644 --- a/tests/modules/bwa/aln/test.yml +++ b/tests/modules/bwa/aln/test.yml @@ -1,24 +1,24 @@ - name: bwa aln single-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln files: - path: ./output/bwa/test.sai md5sum: aaaf39b6814c96ca1a5eacc662adf926 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa aln paired-end - command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/aln -entry test_bwa_aln_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/aln/nextflow.config tags: - bwa - bwa/aln @@ -27,13 +27,13 @@ md5sum: aaaf39b6814c96ca1a5eacc662adf926 
- path: ./output/bwa/test.2.sai md5sum: b4f185d9b4cb256dd5c377070a536124 - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/index/main.nf b/tests/modules/bwa/index/main.nf index 30d31202..fe040cb2 100644 --- a/tests/modules/bwa/index/main.nf +++ b/tests/modules/bwa/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [publish_dir:'bwa'] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' workflow test_bwa_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwa/index/nextflow.config b/tests/modules/bwa/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/index/test.yml b/tests/modules/bwa/index/test.yml index cdcb5e53..a9dab91d 100644 --- a/tests/modules/bwa/index/test.yml +++ b/tests/modules/bwa/index/test.yml @@ -1,16 +1,16 @@ -- name: bwa index - command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c tests/config/nextflow.config +- name: bwa index test_bwa_index + command: nextflow run ./tests/modules/bwa/index -entry test_bwa_index -c ./tests/config/nextflow.config -c 
./tests/modules/bwa/index/nextflow.config tags: - bwa - bwa/index files: - - path: ./output/bwa/bwa/genome.bwt - md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/bwa/bwa/genome.amb + - path: output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/bwa/bwa/genome.sa - md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - - path: ./output/bwa/bwa/genome.pac - md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/bwa/bwa/genome.ann + - path: output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/mem/main.nf b/tests/modules/bwa/mem/main.nf index bac51d23..c9c57197 100644 --- a/tests/modules/bwa/mem/main.nf +++ b/tests/modules/bwa/mem/main.nf @@ -2,32 +2,71 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' // // Test with single-end data // workflow test_bwa_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with single-end data and sort +// +workflow test_bwa_mem_single_end_sort { + input 
= [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } // // Test with paired-end data // workflow test_bwa_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWA_INDEX ( fasta ) - BWA_MEM ( input, BWA_INDEX.out.index ) + BWA_MEM ( input, BWA_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwa_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWA_INDEX ( fasta ) + BWA_MEM ( input, BWA_INDEX.out.index, true ) } diff --git a/tests/modules/bwa/mem/nextflow.config b/tests/modules/bwa/mem/nextflow.config new file mode 100644 index 00000000..d15f6939 --- /dev/null +++ b/tests/modules/bwa/mem/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BWA_MEM { + ext.args2 = { sort_bam ? 
"" : "-bh" } + } + +} diff --git a/tests/modules/bwa/mem/test.yml b/tests/modules/bwa/mem/test.yml index df1988b5..8fe2ee6b 100644 --- a/tests/modules/bwa/mem/test.yml +++ b/tests/modules/bwa/mem/test.yml @@ -1,35 +1,71 @@ - name: bwa mem single-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + +- name: bwa mem single-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 - name: bwa mem paired-end - command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/bwa/mem -entry test_bwa_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config tags: - bwa - bwa/mem files: - path: ./output/bwa/test.bam - - path: ./output/index/bwa/genome.bwt + - path: ./output/bwa/bwa/genome.bwt md5sum: 0469c30a1e239dd08f68afe66fde99da - - path: ./output/index/bwa/genome.amb + - path: ./output/bwa/bwa/genome.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwa/genome.ann + - path: ./output/bwa/bwa/genome.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - - path: ./output/index/bwa/genome.pac + - path: ./output/bwa/bwa/genome.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwa/genome.sa + - path: ./output/bwa/bwa/genome.sa + md5sum: ab3952cabf026b48cd3eb5bccbb636d1 + +- name: bwa mem paired-end sort + command: nextflow run ./tests/modules/bwa/mem -entry test_bwa_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwa/mem/nextflow.config + tags: + - bwa + - bwa/mem + files: + - path: ./output/bwa/test.bam + - path: ./output/bwa/bwa/genome.bwt + md5sum: 0469c30a1e239dd08f68afe66fde99da + - path: ./output/bwa/bwa/genome.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwa/bwa/genome.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: ./output/bwa/bwa/genome.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwa/bwa/genome.sa md5sum: ab3952cabf026b48cd3eb5bccbb636d1 diff --git a/tests/modules/bwa/sampe/main.nf b/tests/modules/bwa/sampe/main.nf index 017f27e5..abd25566 100644 --- a/tests/modules/bwa/sampe/main.nf +++ b/tests/modules/bwa/sampe/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from 
'../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMPE } from '../../../../modules/bwa/sampe/main.nf' workflow test_bwa_sampe { diff --git a/tests/modules/bwa/sampe/nextflow.config b/tests/modules/bwa/sampe/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/sampe/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml index ba5e704d..fb6d7708 100644 --- a/tests/modules/bwa/sampe/test.yml +++ b/tests/modules/bwa/sampe/test.yml @@ -1,5 +1,5 @@ - name: bwa sampe - command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/sampe -entry test_bwa_sampe -c ./tests/config/nextflow.config -c ./tests/modules/bwa/sampe/nextflow.config tags: - bwa - bwa/sampe diff --git a/tests/modules/bwa/samse/main.nf b/tests/modules/bwa/samse/main.nf index 87a7c7b1..17912c36 100644 --- a/tests/modules/bwa/samse/main.nf +++ b/tests/modules/bwa/samse/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' addParams( options: [:] ) -include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' addParams( options: [:] ) -include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' addParams( options: [:] ) +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_ALN } from '../../../../modules/bwa/aln/main.nf' +include { BWA_SAMSE } from '../../../../modules/bwa/samse/main.nf' workflow test_bwa_samse { diff --git a/tests/modules/bwa/samse/nextflow.config b/tests/modules/bwa/samse/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwa/samse/nextflow.config @@ -0,0 +1,5 @@ +process { 
+ + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml index 597844d4..5a2fe1e3 100644 --- a/tests/modules/bwa/samse/test.yml +++ b/tests/modules/bwa/samse/test.yml @@ -1,5 +1,5 @@ - name: bwa samse - command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwa/samse -entry test_bwa_samse -c ./tests/config/nextflow.config -c ./tests/modules/bwa/samse/nextflow.config tags: - bwa - bwa/samse diff --git a/tests/modules/bwamem2/index/main.nf b/tests/modules/bwamem2/index/main.nf index bb7d0803..fe88f8f7 100644 --- a/tests/modules/bwamem2/index/main.nf +++ b/tests/modules/bwamem2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [publish_dir:'bwamem2'] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' workflow test_bwamem2_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwamem2/index/nextflow.config b/tests/modules/bwamem2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwamem2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwamem2/index/test.yml b/tests/modules/bwamem2/index/test.yml index d9d15c53..efada6ec 100644 --- a/tests/modules/bwamem2/index/test.yml +++ b/tests/modules/bwamem2/index/test.yml @@ -1,5 +1,5 @@ - name: bwamem2 index - command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/index -entry test_bwamem2_index -c ./tests/config/nextflow.config -c 
./tests/modules/bwamem2/index/nextflow.config tags: - bwamem2 - bwamem2/index diff --git a/tests/modules/bwamem2/mem/main.nf b/tests/modules/bwamem2/mem/main.nf index 5abda8bb..b4293dbe 100644 --- a/tests/modules/bwamem2/mem/main.nf +++ b/tests/modules/bwamem2/mem/main.nf @@ -2,32 +2,72 @@ nextflow.enable.dsl = 2 -include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' addParams( options: [:] ) -include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' addParams( options: [:] ) +include { BWAMEM2_INDEX } from '../../../../modules/bwamem2/index/main.nf' +include { BWAMEM2_MEM } from '../../../../modules/bwamem2/mem/main.nf' // // Test with single-end data // workflow test_bwamem2_mem_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) } +// +// Test with single-end data and sort +// +workflow test_bwamem2_mem_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) +} + + // // Test with paired-end data // workflow test_bwamem2_mem_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], 
checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMEM2_INDEX ( fasta ) - BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, false ) +} + +// +// Test with paired-end data and sort +// +workflow test_bwamem2_mem_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BWAMEM2_INDEX ( fasta ) + BWAMEM2_MEM ( input, BWAMEM2_INDEX.out.index, true ) } diff --git a/tests/modules/bwamem2/mem/nextflow.config b/tests/modules/bwamem2/mem/nextflow.config new file mode 100644 index 00000000..b5181865 --- /dev/null +++ b/tests/modules/bwamem2/mem/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BWAMEM2_MEM { + ext.args2 = { sort_bam ? 
"" : "-bh" } + } + +} diff --git a/tests/modules/bwamem2/mem/test.yml b/tests/modules/bwamem2/mem/test.yml index cc2fe2a8..bf445ebe 100644 --- a/tests/modules/bwamem2/mem/test.yml +++ b/tests/modules/bwamem2/mem/test.yml @@ -1,35 +1,71 @@ - name: bwamem2 mem single-end - command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + +- name: bwamem2 mem single-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: 
./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 - name: bwamem2 mem paired-end - command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config tags: - bwamem2 - bwamem2/mem files: - path: ./output/bwamem2/test.bam - - path: ./output/index/bwamem2/genome.fasta.amb + - path: ./output/bwamem2/bwamem2/genome.fasta.amb md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e - - path: ./output/index/bwamem2/genome.fasta.pac + - path: ./output/bwamem2/bwamem2/genome.fasta.pac md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 - - path: ./output/index/bwamem2/genome.fasta.0123 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 md5sum: b02870de80106104abcb03cd9463e7d8 - - path: ./output/index/bwamem2/genome.fasta.bwt.2bit.64 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 md5sum: d097a1b82dee375d41a1ea69895a9216 - - path: ./output/index/bwamem2/genome.fasta.ann + - path: ./output/bwamem2/bwamem2/genome.fasta.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + +- name: bwamem2 mem paired-end sort + command: nextflow run ./tests/modules/bwamem2/mem -entry test_bwamem2_mem_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/bwamem2/mem/nextflow.config + tags: + - bwamem2 + - bwamem2/mem + files: + - path: ./output/bwamem2/test.bam + - path: ./output/bwamem2/bwamem2/genome.fasta.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: ./output/bwamem2/bwamem2/genome.fasta.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: ./output/bwamem2/bwamem2/genome.fasta.0123 + md5sum: b02870de80106104abcb03cd9463e7d8 + - path: ./output/bwamem2/bwamem2/genome.fasta.bwt.2bit.64 + md5sum: d097a1b82dee375d41a1ea69895a9216 + - 
path: ./output/bwamem2/bwamem2/genome.fasta.ann md5sum: c32e11f6c859f166c7525a9c1d583567 diff --git a/tests/modules/bwameth/align/main.nf b/tests/modules/bwameth/align/main.nf index fb8cad6a..8066941c 100644 --- a/tests/modules/bwameth/align/main.nf +++ b/tests/modules/bwameth/align/main.nf @@ -2,16 +2,19 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [:] ) -include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' addParams( options: [:] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' +include { BWAMETH_ALIGN } from '../../../../modules/bwameth/align/main.nf' // // Test with single-end data // workflow test_bwameth_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) @@ -22,10 +25,13 @@ workflow test_bwameth_align_single_end { // Test with paired-end data // workflow test_bwameth_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) BWAMETH_INDEX ( fasta ) 
diff --git a/tests/modules/bwameth/align/nextflow.config b/tests/modules/bwameth/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/align/test.yml b/tests/modules/bwameth/align/test.yml index 5cf4b84d..f921b5f4 100644 --- a/tests/modules/bwameth/align/test.yml +++ b/tests/modules/bwameth/align/test.yml @@ -1,5 +1,5 @@ - name: bwameth align single-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align @@ -7,7 +7,7 @@ - path: output/bwameth/test.bam - name: bwameth align paired-end test workflow - command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/align -entry test_bwameth_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/align/nextflow.config tags: - bwameth - bwameth/align diff --git a/tests/modules/bwameth/index/main.nf b/tests/modules/bwameth/index/main.nf index 46662201..b70fd1f7 100644 --- a/tests/modules/bwameth/index/main.nf +++ b/tests/modules/bwameth/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' addParams( options: [publish_dir:'bwameth'] ) +include { BWAMETH_INDEX } from '../../../../modules/bwameth/index/main.nf' workflow test_bwameth_index { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/bwameth/index/nextflow.config 
b/tests/modules/bwameth/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/bwameth/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/bwameth/index/test.yml b/tests/modules/bwameth/index/test.yml index 0cc7922e..9783c511 100644 --- a/tests/modules/bwameth/index/test.yml +++ b/tests/modules/bwameth/index/test.yml @@ -1,5 +1,5 @@ - name: bwameth index test workflow - command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bwameth/index -entry test_bwameth_index -c ./tests/config/nextflow.config -c ./tests/modules/bwameth/index/nextflow.config tags: - bwameth - bwameth/index diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf index a110a8ab..430c71fa 100644 --- a/tests/modules/cat/cat/main.nf +++ b/tests/modules/cat/cat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' addParams( options: [:] ) +include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' workflow test_cat_unzipped_unzipped { diff --git a/tests/modules/cat/cat/nextflow.config b/tests/modules/cat/cat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/cat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index 2f234a01..d7973042 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -1,5 +1,5 @@ - name: cat unzipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry 
test_cat_unzipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -8,7 +8,7 @@ md5sum: f44b33a0e441ad58b2d3700270e2dbe2 - name: cat zipped zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -16,7 +16,7 @@ - path: output/cat/cat.txt.gz - name: cat zipped unzipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat @@ -25,7 +25,7 @@ md5sum: c439d3b60e7bc03e8802a451a0d9a5d9 - name: cat unzipped zipped - command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: - cat - cat/cat diff --git a/tests/modules/cat/fastq/main.nf b/tests/modules/cat/fastq/main.nf index 027bd108..1ed23ce5 100644 --- a/tests/modules/cat/fastq/main.nf +++ b/tests/modules/cat/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' addParams( options: [publish_dir:'cat'] ) +include { CAT_FASTQ } from '../../../../modules/cat/fastq/main.nf' workflow test_cat_fastq_single_end { input = [ @@ -25,3 +25,25 @@ workflow test_cat_fastq_paired_end { CAT_FASTQ ( input ) } + +workflow test_cat_fastq_single_end_same_name { + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} + +workflow test_cat_fastq_paired_end_same_name { + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + CAT_FASTQ ( input ) +} diff --git a/tests/modules/cat/fastq/nextflow.config b/tests/modules/cat/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cat/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cat/fastq/test.yml b/tests/modules/cat/fastq/test.yml index 9a5af25c..56374060 100644 --- a/tests/modules/cat/fastq/test.yml +++ b/tests/modules/cat/fastq/test.yml @@ -1,19 +1,39 @@ - name: cat fastq single-end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq files: - path: ./output/cat/test.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d - name: cat fastq fastqc_paired_end - command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config tags: - cat - cat/fastq files: - path: 
./output/cat/test_2.merged.fastq.gz - md5sum: d2b1a836eef1058738ecab36c907c5ba + md5sum: 77c8e966e130d8c6b6ec9be52fcb2bda - path: ./output/cat/test_1.merged.fastq.gz - md5sum: 59f6dbe193741bb40f498f254aeb2e99 + md5sum: f9cf5e375f7de81a406144a2c70cc64d + +- name: cat fastq single-end-same-name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_single_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + +- name: cat fastq fastqc_paired_end_same_name + command: nextflow run ./tests/modules/cat/fastq -entry test_cat_fastq_paired_end_same_name -c ./tests/config/nextflow.config -c ./tests/modules/cat/fastq/nextflow.config + tags: + - cat + - cat/fastq + files: + - path: ./output/cat/test_1.merged.fastq.gz + md5sum: 63f817db7a29a03eb538104495556f66 + - path: ./output/cat/test_2.merged.fastq.gz + md5sum: fe9f266f43a6fc3dcab690a18419a56e diff --git a/tests/modules/cellranger/count/main.nf b/tests/modules/cellranger/count/main.nf new file mode 100644 index 00000000..bb9e11d1 --- /dev/null +++ b/tests/modules/cellranger/count/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' +include { CELLRANGER_COUNT } from '../../../../modules/cellranger/count/main.nf' + +workflow test_cellranger_count { + + input = [ [ id:'test', single_end:true, strandedness:'forward', gem: '123', samples: ["test_10x"] ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_10x_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_10x_2_fastq_gz'], checkIfExists: true) + ] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: 
true) + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + reference_name = "homo_sapiens_chr22_reference" + + CELLRANGER_MKGTF ( gtf ) + + CELLRANGER_MKREF ( + fasta, + CELLRANGER_MKGTF.out.gtf, + reference_name + ) + + CELLRANGER_COUNT( + input, + CELLRANGER_MKREF.out.reference + ) +} diff --git a/tests/modules/cellranger/count/nextflow.config b/tests/modules/cellranger/count/nextflow.config new file mode 100644 index 00000000..16419fce --- /dev/null +++ b/tests/modules/cellranger/count/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + + withName: CELLRANGER_COUNT { + ext.args = '--chemistry SC3Pv3' + } + +} diff --git a/tests/modules/cellranger/count/test.yml b/tests/modules/cellranger/count/test.yml new file mode 100644 index 00000000..6b151a2a --- /dev/null +++ b/tests/modules/cellranger/count/test.yml @@ -0,0 +1,19 @@ +- name: cellranger count test_cellranger_count + command: nextflow run tests/modules/cellranger/count -entry test_cellranger_count -c tests/config/nextflow.config -c tests/modules/cellranger/count/nextflow.config + tags: + - cellranger + - cellranger/count + files: + - 
path: output/cellranger/sample-123/outs/filtered_feature_bc_matrix.h5 + - path: output/cellranger/sample-123/outs/metrics_summary.csv + md5sum: 707df0f101d479d93f412ca74f9c4131 + - path: output/cellranger/sample-123/outs/molecule_info.h5 + md5sum: cf03b2b3ca776a1c37aa3518e91268ba + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam + md5sum: 15441da9cfceea0bb48c8b66b1b860df + - path: output/cellranger/sample-123/outs/possorted_genome_bam.bam.bai + md5sum: 7c3d49c77016a09535aff61a027f750c + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix + - path: output/cellranger/sample-123/outs/raw_feature_bc_matrix.h5 + md5sum: 40c8df814eb8723b7317b234dc8222e9 + - path: output/cellranger/sample-123/outs/web_summary.html diff --git a/tests/modules/cellranger/mkfastq/main.nf b/tests/modules/cellranger/mkfastq/main.nf new file mode 100644 index 00000000..5e594fd1 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/main.nf @@ -0,0 +1,26 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { CELLRANGER_MKFASTQ } from '../../../../modules/cellranger/mkfastq/main.nf' + +workflow test_cellranger_mkfastq_simple { + + simple_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-simple-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( UNTAR.out.untar, simple_csv) +} + +workflow test_cellranger_mkfastq_illumina { + + samplesheet_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-samplesheet-1.2.0.csv", checkIfExists: true) + tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + + UNTAR ( tiny_bcl ) + + CELLRANGER_MKFASTQ ( UNTAR.out.untar, samplesheet_csv) +} diff --git a/tests/modules/cellranger/mkfastq/nextflow.config 
b/tests/modules/cellranger/mkfastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkfastq/test.yml b/tests/modules/cellranger/mkfastq/test.yml new file mode 100644 index 00000000..bdd32187 --- /dev/null +++ b/tests/modules/cellranger/mkfastq/test.yml @@ -0,0 +1,13 @@ +- name: cellranger mkfastq test_cellranger_mkfastq_simple + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_simple -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq + # files: + # - path: output/cellranger/genome.filtered.gtf + # md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b +- name: cellranger mkfastq test_cellranger_mkfastq_illumina + command: nextflow run tests/modules/cellranger/mkfastq -entry test_cellranger_mkfastq_illumina -c tests/config/nextflow.config -c ./tests/modules/cellranger/mkfastq/nextflow.config + tags: + - cellranger + - cellranger/mkfastq diff --git a/tests/modules/cellranger/mkgtf/main.nf b/tests/modules/cellranger/mkgtf/main.nf new file mode 100644 index 00000000..19e2cba0 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/main.nf @@ -0,0 +1,11 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CELLRANGER_MKGTF } from '../../../../modules/cellranger/mkgtf/main.nf' + +workflow test_cellranger_mkgtf { + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + + CELLRANGER_MKGTF ( gtf ) +} diff --git a/tests/modules/cellranger/mkgtf/nextflow.config b/tests/modules/cellranger/mkgtf/nextflow.config new file mode 100644 index 00000000..03fd9e09 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CELLRANGER_MKGTF { + ext.args = '--attribute=gene_biotype:protein_coding \ + --attribute=gene_biotype:lincRNA \ + --attribute=gene_biotype:antisense \ + --attribute=gene_biotype:IG_LV_gene \ + --attribute=gene_biotype:IG_V_gene \ + --attribute=gene_biotype:IG_V_pseudogene \ + --attribute=gene_biotype:IG_D_gene \ + --attribute=gene_biotype:IG_J_gene \ + --attribute=gene_biotype:IG_J_pseudogene \ + --attribute=gene_biotype:IG_C_gene \ + --attribute=gene_biotype:IG_C_pseudogene \ + --attribute=gene_biotype:TR_V_gene \ + --attribute=gene_biotype:TR_V_pseudogene \ + --attribute=gene_biotype:TR_D_gene \ + --attribute=gene_biotype:TR_J_gene \ + --attribute=gene_biotype:TR_J_pseudogene \ + --attribute=gene_biotype:TR_C_gene' + + + } + +} diff --git a/tests/modules/cellranger/mkgtf/test.yml b/tests/modules/cellranger/mkgtf/test.yml new file mode 100644 index 00000000..2130afd2 --- /dev/null +++ b/tests/modules/cellranger/mkgtf/test.yml @@ -0,0 +1,8 @@ +- name: cellranger mkgtf test_cellranger_mkgtf + command: nextflow run tests/modules/cellranger/mkgtf -entry test_cellranger_mkgtf -c tests/config/nextflow.config -c tests/modules/cellranger/mkgtf/nextflow.config + tags: + - cellranger + - cellranger/mkgtf + files: + - path: output/cellranger/genome.filtered.gtf + md5sum: a8b8a7b5039e05d3a9cf9151ea138b5b diff --git a/tests/modules/cellranger/mkref/main.nf b/tests/modules/cellranger/mkref/main.nf index b20a68db..ad98ed1a 100644 --- a/tests/modules/cellranger/mkref/main.nf +++ b/tests/modules/cellranger/mkref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' addParams( options: [:] ) +include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' workflow test_cellranger_mkref { diff --git a/tests/modules/cellranger/mkref/nextflow.config 
b/tests/modules/cellranger/mkref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cellranger/mkref/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cellranger/mkref/test.yml b/tests/modules/cellranger/mkref/test.yml index 5e60819e..eb01e9e2 100644 --- a/tests/modules/cellranger/mkref/test.yml +++ b/tests/modules/cellranger/mkref/test.yml @@ -1,5 +1,5 @@ - name: cellranger mkref test_cellranger_mkref - command: nextflow run tests/modules/cellranger/mkref -entry test_cellranger_mkref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cellranger/mkref -entry test_cellranger_mkref -c ./tests/config/nextflow.config -c ./tests/modules/cellranger/mkref/nextflow.config tags: - cellranger - cellranger/mkref diff --git a/tests/modules/checkm/lineagewf/main.nf b/tests/modules/checkm/lineagewf/main.nf index 94309896..e914774c 100644 --- a/tests/modules/checkm/lineagewf/main.nf +++ b/tests/modules/checkm/lineagewf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' addParams( options: [:] ) +include { CHECKM_LINEAGEWF } from '../../../../modules/checkm/lineagewf/main.nf' workflow test_checkm_lineagewf { diff --git a/tests/modules/checkm/lineagewf/nextflow.config b/tests/modules/checkm/lineagewf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/checkm/lineagewf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/checkm/lineagewf/test.yml b/tests/modules/checkm/lineagewf/test.yml index 768601b0..6749f6aa 100644 --- a/tests/modules/checkm/lineagewf/test.yml +++ b/tests/modules/checkm/lineagewf/test.yml @@ -1,5 +1,5 @@ - name: checkm 
lineagewf - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf -c ./tests/config/nextflow.config -c ./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf @@ -16,7 +16,7 @@ - "UID1" - name: checkm lineagewf_multi - command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/checkm/lineagewf -entry test_checkm_lineagewf_multi -c ./tests/config/nextflow.config -c ./tests/modules/checkm/lineagewf/nextflow.config tags: - checkm - checkm/lineagewf diff --git a/tests/modules/chromap/chromap/main.nf b/tests/modules/chromap/chromap/main.nf index a5a1fc86..5522f2b5 100644 --- a/tests/modules/chromap/chromap/main.nf +++ b/tests/modules/chromap/chromap/main.nf @@ -2,19 +2,20 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' addParams( options: [:] ) -include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' addParams( options: ['args': '--SAM'] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_BASE } from '../../../../modules/chromap/chromap/main.nf' +include { CHROMAP_CHROMAP as CHROMAP_CHROMAP_SAM } from '../../../../modules/chromap/chromap/main.nf' workflow test_chromap_chromap_single_end { // Test single-end and gz compressed output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + [ + 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -31,8 +32,6 @@ workflow test_chromap_chromap_single_end { workflow test_chromap_chromap_paired_end { // Test paired-end and gz compressed output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -40,6 +39,7 @@ workflow test_chromap_chromap_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_BASE ( @@ -56,8 +56,6 @@ workflow test_chromap_chromap_paired_end { workflow test_chromap_chromap_paired_bam { // Test paired-end and bam output - - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) input = [ [ id:'test', single_end:false ], // meta map [ @@ -65,6 +63,7 @@ workflow test_chromap_chromap_paired_bam { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) CHROMAP_INDEX ( fasta ) CHROMAP_CHROMAP_SAM ( diff --git a/tests/modules/chromap/chromap/nextflow.config b/tests/modules/chromap/chromap/nextflow.config new file mode 100644 index 00000000..1e979bb9 --- /dev/null +++ b/tests/modules/chromap/chromap/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CHROMAP_CHROMAP_SAM { + ext.args = '--SAM' + } + +} diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index b2ce8137..20a51e2b 100644 --- a/tests/modules/chromap/chromap/test.yml +++ 
b/tests/modules/chromap/chromap/test.yml @@ -1,5 +1,5 @@ - name: chromap chromap test_chromap_chromap_single_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_single_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -10,7 +10,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: chromap chromap test_chromap_chromap_paired_end - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap @@ -21,7 +21,7 @@ md5sum: cafd8fb21977f5ae69e9008b220ab169 - name: chromap chromap test_chromap_chromap_paired_bam - command: nextflow run tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config tags: - chromap/chromap - chromap diff --git a/tests/modules/chromap/index/main.nf b/tests/modules/chromap/index/main.nf index 997baba1..18b42006 100644 --- a/tests/modules/chromap/index/main.nf +++ b/tests/modules/chromap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' addParams( options: [:] ) +include { CHROMAP_INDEX } from '../../../../modules/chromap/index/main.nf' workflow test_chromap_index { diff --git a/tests/modules/chromap/index/nextflow.config b/tests/modules/chromap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/chromap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml index 0a99a3a0..74cfadfc 100644 --- a/tests/modules/chromap/index/test.yml +++ b/tests/modules/chromap/index/test.yml @@ -1,5 +1,5 @@ - name: chromap index test_chromap_index - command: nextflow run tests/modules/chromap/index -entry test_chromap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/chromap/index -entry test_chromap_index -c ./tests/config/nextflow.config -c ./tests/modules/chromap/index/nextflow.config tags: - chromap/index - chromap diff --git a/tests/modules/clonalframeml/main.nf b/tests/modules/clonalframeml/main.nf new file mode 100644 index 00000000..73773113 --- /dev/null +++ b/tests/modules/clonalframeml/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CLONALFRAMEML } from '../../../modules/clonalframeml/main.nf' + +workflow test_clonalframeml { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_nwk'], checkIfExists: true), + file(params.test_data['haemophilus_influenzae']['genome']['genome_aln_gz'], checkIfExists: true) + ] + + CLONALFRAMEML ( input ) +} diff --git a/tests/modules/clonalframeml/nextflow.config b/tests/modules/clonalframeml/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/clonalframeml/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/clonalframeml/test.yml b/tests/modules/clonalframeml/test.yml new file mode 100644 index 00000000..8ea11d16 --- /dev/null +++ b/tests/modules/clonalframeml/test.yml @@ -0,0 +1,15 @@ +- name: clonalframeml test_clonalframeml + 
command: nextflow run ./tests/modules/clonalframeml -entry test_clonalframeml -c ./tests/config/nextflow.config -c ./tests/modules/clonalframeml/nextflow.config + tags: + - clonalframeml + files: + - path: output/clonalframeml/test.ML_sequence.fasta + md5sum: 1b75cdaea78f5920ebb92125422a2589 + - path: output/clonalframeml/test.em.txt + md5sum: 5439d59897a9a90390bb175207bf2b9b + - path: output/clonalframeml/test.importation_status.txt + md5sum: 6ce9dbc7746b1c884af042fa02311fba + - path: output/clonalframeml/test.labelled_tree.newick + md5sum: aa47754eea8a3b6bab56bd7c83ba78db + - path: output/clonalframeml/test.position_cross_reference.txt + md5sum: 8ff60768b348fc6f7a1e787aca72f596 diff --git a/tests/modules/cmseq/polymut/main.nf b/tests/modules/cmseq/polymut/main.nf new file mode 100644 index 00000000..df6a0ac1 --- /dev/null +++ b/tests/modules/cmseq/polymut/main.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CMSEQ_POLYMUT } from '../../../../modules/cmseq/polymut/main.nf' + +workflow test_cmseq_polymut_1 { + + input_1 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [], + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_1 ) + +} + +workflow test_cmseq_polymut_2 { + input_2 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + [] ] + + CMSEQ_POLYMUT( input_2 ) +} + +workflow test_cmseq_polymut_3 { + input_3 = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), ] + + CMSEQ_POLYMUT( input_3 ) +} + diff --git a/tests/modules/cmseq/polymut/nextflow.config b/tests/modules/cmseq/polymut/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cmseq/polymut/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cmseq/polymut/test.yml b/tests/modules/cmseq/polymut/test.yml new file mode 100644 index 00000000..05887fa8 --- /dev/null +++ b/tests/modules/cmseq/polymut/test.yml @@ -0,0 +1,26 @@ +- name: cmseq polymut test_cmseq_polymut_1 + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_1 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_2 + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_2 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc + +- name: cmseq polymut test_cmseq_polymut_3 + command: nextflow run ./tests/modules/cmseq/polymut -entry test_cmseq_polymut_3 -c ./tests/config/nextflow.config -c ./tests/modules/cmseq/polymut/nextflow.config + tags: + - cmseq/polymut + - cmseq + files: + - path: output/cmseq/test.txt + md5sum: fd325c1724ee23d132a9115c64494efc diff --git a/tests/modules/cnvkit/batch/main.nf b/tests/modules/cnvkit/batch/main.nf new file mode 100755 index 00000000..6b40dec6 --- 
/dev/null +++ b/tests/modules/cnvkit/batch/main.nf @@ -0,0 +1,57 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVKIT_BATCH as CNVKIT_HYBRID } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_WGS } from '../../../../modules/cnvkit/batch/main.nf' +include { CNVKIT_BATCH as CNVKIT_TUMORONLY } from '../../../../modules/cnvkit/batch/main.nf' + +workflow test_cnvkit_hybrid { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) + + CNVKIT_HYBRID ( input, fasta, targets, [] ) +} + +workflow test_cnvkit_wgs { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + +workflow test_cnvkit_cram { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + CNVKIT_WGS ( input, fasta, [], [] ) +} + +workflow test_cnvkit_tumoronly { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + [] + ] + fasta = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + reference = file(params.test_data['generic']['cnn']['reference'], checkIfExists: true) + + CNVKIT_TUMORONLY ( input, [], [], reference ) +} diff --git a/tests/modules/cnvkit/batch/nextflow.config b/tests/modules/cnvkit/batch/nextflow.config new file mode 100644 index 00000000..b8a8fc3f --- /dev/null +++ b/tests/modules/cnvkit/batch/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CNVKIT_HYBRID { + ext.args = '--output-reference reference.cnn' + } + + withName: CNVKIT_WGS { + ext.args = '--output-reference reference.cnn --method wgs' + } + + withName: CNVKIT_TUMORONLY { + ext.args = '--method wgs' + } + +} diff --git a/tests/modules/cnvkit/batch/test.yml b/tests/modules/cnvkit/batch/test.yml new file mode 100755 index 00000000..57af3603 --- /dev/null +++ b/tests/modules/cnvkit/batch/test.yml @@ -0,0 +1,101 @@ +- name: cnvkit batch test_cnvkit_hybrid + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_hybrid -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/baits.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/baits.target.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb + - path: output/cnvkit/reference.cnn + md5sum: ac99c1ad8b917b96ae15119146c91ab9 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test.paired_end.sorted.call.cns + md5sum: f2ca59b4d50b0c317adc526c1b99b622 + - path: output/cnvkit/test.paired_end.sorted.cnr + md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 + - path: output/cnvkit/test.paired_end.sorted.cns + md5sum: 
060af1aa637ed51812af19bcce24fcfe + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: 3fe80b6013ffc3e9968345e810158215 + - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn + md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 + +- name: cnvkit batch test_cnvkit_wgs + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_wgs -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_cram + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_cram -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config + tags: + - 
cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/genome.antitarget.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/genome.bed + md5sum: 87a15eb9c2ff20ccd5cd8735a28708f7 + - path: output/cnvkit/genome.target.bed + md5sum: a13353ae9c8405e701390c069255bbd2 + - path: output/cnvkit/reference.cnn + md5sum: 05c6211e0179885b8a83e44fd21d5f86 + - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn + md5sum: ff526714696aa49bdc1dc8d00d965266 + - path: output/cnvkit/test2.paired_end.sorted.antitargetcoverage.cnn + md5sum: 203caf8cef6935bb50b4138097955cb8 + - path: output/cnvkit/test2.paired_end.sorted.bintest.cns + md5sum: 6544d979475def8a9f69ba42a985668d + - path: output/cnvkit/test2.paired_end.sorted.call.cns + md5sum: f6de754c34f780e6befee5b3ff0893f8 + - path: output/cnvkit/test2.paired_end.sorted.cnr + md5sum: 80318d06c6b095945a0fb0e85e887cbc + - path: output/cnvkit/test2.paired_end.sorted.cns + md5sum: 76afa47afc4bd5de35aee8fdb54d3d3a + - path: output/cnvkit/test2.paired_end.sorted.targetcoverage.cnn + md5sum: 6ae6b3fce7299eedca6133d911c38fe1 + +- name: cnvkit batch test_cnvkit_tumoronly + command: nextflow run ./tests/modules/cnvkit/batch -entry test_cnvkit_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/cnvkit/batch/nextflow.config + tags: + - cnvkit/batch + - cnvkit + files: + - path: output/cnvkit/reference.antitarget-tmp.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvkit/reference.target-tmp.bed + md5sum: 26d25ff2d6c45b6d92169b3559c6acdb diff --git a/tests/modules/cnvkit/main.nf b/tests/modules/cnvkit/main.nf deleted file mode 100755 index 6ee959ab..00000000 --- a/tests/modules/cnvkit/main.nf +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env nextflow - -nextflow.enable.dsl = 2 - -include { CNVKIT } from '../../../modules/cnvkit/main.nf' addParams( options: [ 'args': '--output-reference 
reference.cnn' ] ) - -workflow test_cnvkit { - tumourbam = file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - normalbam = file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) - - input = [ [ id:'test' ], // meta map - tumourbam, - normalbam - ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - targets = file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true) - - CNVKIT ( input, fasta, targets ) -} diff --git a/tests/modules/cnvkit/test.yml b/tests/modules/cnvkit/test.yml deleted file mode 100755 index 6e09d6f3..00000000 --- a/tests/modules/cnvkit/test.yml +++ /dev/null @@ -1,27 +0,0 @@ -- name: cnvkit - command: nextflow run ./tests/modules/cnvkit/ -entry test_cnvkit -c tests/config/nextflow.config - tags: - - cnvkit - files: - - path: output/cnvkit/baits.target.bed - md5sum: 26d25ff2d6c45b6d92169b3559c6acdb - - path: output/cnvkit/baits.antitarget.bed - md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/cnvkit/reference.cnn - md5sum: ac99c1ad8b917b96ae15119146c91ab9 - - path: output/cnvkit/test.paired_end.sorted.targetcoverage.cnn - md5sum: 3fe80b6013ffc3e9968345e810158215 - - path: output/cnvkit/test.paired_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.single_end.sorted.targetcoverage.cnn - md5sum: aa8a018b1d4d1e688c9f9f6ae01bf4d7 - - path: output/cnvkit/test.single_end.sorted.antitargetcoverage.cnn - md5sum: 203caf8cef6935bb50b4138097955cb8 - - path: output/cnvkit/test.paired_end.sorted.cnr - md5sum: 7e37d73ab604dbc3fe4ebb56aca9bdc3 - - path: output/cnvkit/test.paired_end.sorted.cns - md5sum: 060af1aa637ed51812af19bcce24fcfe - - path: output/cnvkit/test.paired_end.sorted.bintest.cns - md5sum: 6544d979475def8a9f69ba42a985668d - - path: output/cnvkit/test.paired_end.sorted.call.cns - md5sum: f2ca59b4d50b0c317adc526c1b99b622 diff --git 
a/tests/modules/cooler/cload/main.nf b/tests/modules/cooler/cload/main.nf new file mode 100644 index 00000000..170b7e11 --- /dev/null +++ b/tests/modules/cooler/cload/main.nf @@ -0,0 +1,52 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_CLOAD } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_PAIRS } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_CLOAD as COOLER_CLOAD_TABIX } from '../../../../modules/cooler/cload/main.nf' +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_PAIRS} from '../../../../modules/cooler/dump/main.nf' +include { COOLER_DUMP as COOLER_DUMP_TABIX} from '../../../../modules/cooler/dump/main.nf' + +workflow test_cooler_cload_pairix { + + input = [ [ id:'test_pairix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_pairix_pair_gz_px2'], checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD ( input, bin_size, sizes ) + COOLER_DUMP(COOLER_CLOAD.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_pairs { + + input = [ [ id:'test_pairs', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_pairs_pair'], checkIfExists: true), + []] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_PAIRS ( input, bin_size, sizes ) + COOLER_DUMP_PAIRS(COOLER_CLOAD_PAIRS.out.cool.map{[it[0], it[2]]}, []) + +} + +workflow test_cooler_cload_tabix { + + input = [ [ id:'test_tabix', single_end:false ], // meta map + file(params.test_data['generic']['cooler']['test_tabix_pair_gz'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_tabix_pair_gz_tbi'], 
checkIfExists: true)] + + sizes = file(params.test_data['generic']['cooler']['hg19_chrom_sizes'], checkIfExists: true) + bin_size = 2000000 + + COOLER_CLOAD_TABIX ( input, bin_size, sizes ) + COOLER_DUMP_TABIX(COOLER_CLOAD_TABIX.out.cool.map{[it[0], it[2]]}, []) + +} diff --git a/tests/modules/cooler/cload/nextflow.config b/tests/modules/cooler/cload/nextflow.config new file mode 100644 index 00000000..610a5425 --- /dev/null +++ b/tests/modules/cooler/cload/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_CLOAD { + ext.args = 'pairix' + } + + withName: COOLER_CLOAD_PAIRS { + ext.args = 'pairs --chrom1 1 --pos1 2 --chrom2 4 --pos2 5 -N' + } + + withName: COOLER_CLOAD_TABIX { + ext.args = 'tabix' + } + +} diff --git a/tests/modules/cooler/cload/test.yml b/tests/modules/cooler/cload/test.yml new file mode 100644 index 00000000..f99f4624 --- /dev/null +++ b/tests/modules/cooler/cload/test.yml @@ -0,0 +1,29 @@ +- name: cooler cload test_cooler_cload_pairix + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairix -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairix.2000000.cool + - path: output/cooler/test_pairix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 + +- name: cooler cload test_cooler_cload_pairs + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_pairs -c ./tests/config/nextflow.config -c ./tests/modules/cooler/cload/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_pairs.2000000.cool + - path: output/cooler/test_pairs.bedpe + md5sum: 7f832733fc7853ebb1937b33e4c1e0de + +- name: cooler cload test_cooler_cload_tabix + command: nextflow run ./tests/modules/cooler/cload -entry test_cooler_cload_tabix -c ./tests/config/nextflow.config -c 
./tests/modules/cooler/cload/nextflow.config + tags: + - cooler/cload + - cooler + files: + - path: output/cooler/test_tabix.2000000.cool + - path: output/cooler/test_tabix.bedpe + md5sum: 0cd85311089669688ec17468eae02111 diff --git a/tests/modules/cooler/digest/main.nf b/tests/modules/cooler/digest/main.nf index 817c9081..4dfa25be 100644 --- a/tests/modules/cooler/digest/main.nf +++ b/tests/modules/cooler/digest/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' addParams( options: [:] ) +include { COOLER_DIGEST } from '../../../../modules/cooler/digest/main.nf' workflow test_cooler_digest { diff --git a/tests/modules/cooler/digest/nextflow.config b/tests/modules/cooler/digest/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/digest/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/digest/test.yml b/tests/modules/cooler/digest/test.yml index b594a232..80430ed7 100644 --- a/tests/modules/cooler/digest/test.yml +++ b/tests/modules/cooler/digest/test.yml @@ -1,5 +1,5 @@ - name: cooler digest test_cooler_digest - command: nextflow run tests/modules/cooler/digest -entry test_cooler_digest -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/digest -entry test_cooler_digest -c ./tests/config/nextflow.config -c ./tests/modules/cooler/digest/nextflow.config tags: - cooler/digest - cooler diff --git a/tests/modules/cooler/dump/main.nf b/tests/modules/cooler/dump/main.nf index e2a647c5..d80ee0d7 100644 --- a/tests/modules/cooler/dump/main.nf +++ b/tests/modules/cooler/dump/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' addParams( options: [:] ) +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' 
workflow test_cooler_dump { input = [ [ id:'test' ], // meta map file("https://raw.githubusercontent.com/open2c/cooler/master/tests/data/toy.asymm.16.cool", checkIfExists: true) ] - COOLER_DUMP ( input ) + COOLER_DUMP ( input, [:] ) } diff --git a/tests/modules/cooler/dump/nextflow.config b/tests/modules/cooler/dump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/dump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/dump/test.yml b/tests/modules/cooler/dump/test.yml index ccfc5f47..6f81c7a9 100644 --- a/tests/modules/cooler/dump/test.yml +++ b/tests/modules/cooler/dump/test.yml @@ -1,5 +1,5 @@ - name: cooler dump test_cooler_dump - command: nextflow run tests/modules/cooler/dump -entry test_cooler_dump -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cooler/dump -entry test_cooler_dump -c ./tests/config/nextflow.config -c ./tests/modules/cooler/dump/nextflow.config tags: - cooler/dump - cooler diff --git a/tests/modules/cooler/merge/main.nf b/tests/modules/cooler/merge/main.nf new file mode 100644 index 00000000..81336984 --- /dev/null +++ b/tests/modules/cooler/merge/main.nf @@ -0,0 +1,20 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_MERGE } from '../../../../modules/cooler/merge/main.nf' +include { COOLER_DUMP } from '../../../../modules/cooler/dump/main.nf' + +workflow test_cooler_merge { + + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true), + file(params.test_data['generic']['cooler']['test_merge_cool_cp2'], checkIfExists: true) + ] + ] + + COOLER_MERGE ( input ) + COOLER_DUMP ( COOLER_MERGE.out.cool, "" ) +} diff --git a/tests/modules/cooler/merge/nextflow.config b/tests/modules/cooler/merge/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/cooler/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/cooler/merge/test.yml b/tests/modules/cooler/merge/test.yml new file mode 100644 index 00000000..c884ba5e --- /dev/null +++ b/tests/modules/cooler/merge/test.yml @@ -0,0 +1,8 @@ +- name: cooler merge test_cooler_merge + command: nextflow run ./tests/modules/cooler/merge -entry test_cooler_merge -c ./tests/config/nextflow.config -c ./tests/modules/cooler/merge/nextflow.config + tags: + - cooler/merge + - cooler + files: + - path: output/cooler/test.bedpe + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 diff --git a/tests/modules/cooler/zoomify/main.nf b/tests/modules/cooler/zoomify/main.nf new file mode 100644 index 00000000..42edadb8 --- /dev/null +++ b/tests/modules/cooler/zoomify/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { COOLER_ZOOMIFY } from '../../../../modules/cooler/zoomify/main.nf' + +workflow test_cooler_zoomify { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['generic']['cooler']['test_merge_cool'], checkIfExists: true) + ] + + COOLER_ZOOMIFY ( input ) +} diff --git a/tests/modules/cooler/zoomify/nextflow.config b/tests/modules/cooler/zoomify/nextflow.config new file mode 100644 index 00000000..d4c3503f --- /dev/null +++ b/tests/modules/cooler/zoomify/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: COOLER_ZOOMIFY { + ext.args = '-r 2,4,8' + } + +} diff --git a/tests/modules/cooler/zoomify/test.yml b/tests/modules/cooler/zoomify/test.yml new file mode 100644 index 00000000..3afdb8a6 --- /dev/null +++ b/tests/modules/cooler/zoomify/test.yml @@ -0,0 +1,8 @@ +- name: cooler zoomify test_cooler_zoomify + command: nextflow run 
./tests/modules/cooler/zoomify -entry test_cooler_zoomify -c ./tests/config/nextflow.config -c ./tests/modules/cooler/zoomify/nextflow.config + tags: + - cooler + - cooler/zoomify + files: + - path: output/cooler/test.bedpe + md5sum: 0ce5e715bfc4674cdda02f2d7e7e3170 diff --git a/tests/modules/csvtk/concat/main.nf b/tests/modules/csvtk/concat/main.nf index 22b0205f..aee31679 100644 --- a/tests/modules/csvtk/concat/main.nf +++ b/tests/modules/csvtk/concat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' addParams( options: [:] ) +include { CSVTK_CONCAT } from '../../../../modules/csvtk/concat/main.nf' workflow test_csvtk_concat { diff --git a/tests/modules/csvtk/concat/nextflow.config b/tests/modules/csvtk/concat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/csvtk/concat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/csvtk/concat/test.yml b/tests/modules/csvtk/concat/test.yml index 0fe9c604..11a2af67 100644 --- a/tests/modules/csvtk/concat/test.yml +++ b/tests/modules/csvtk/concat/test.yml @@ -1,5 +1,5 @@ - name: csvtk concat - command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/csvtk/concat -entry test_csvtk_concat -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/concat/nextflow.config tags: - csvtk - csvtk/concat diff --git a/tests/modules/csvtk/split/main.nf b/tests/modules/csvtk/split/main.nf new file mode 100644 index 00000000..31d24d61 --- /dev/null +++ b/tests/modules/csvtk/split/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CSVTK_SPLIT } from '../../../../modules/csvtk/split/main.nf' + +workflow test_csvtk_split_tsv { + + input = [ + [ id:'test' ], // meta map 
+ [ file(params.test_data['generic']['tsv']['test_tsv'], checkIfExists: true) ] + ] + in_format = "tsv" + out_format = "tsv" + CSVTK_SPLIT ( input, in_format, out_format ) +} + +workflow test_csvtk_split_csv { + + input = [ + [ id:'test' ], // meta map + [ file(params.test_data['generic']['csv']['test_csv'], checkIfExists: true) ] + ] + in_format = "csv" + out_format = "csv" + CSVTK_SPLIT( input, in_format, out_format ) +} diff --git a/tests/modules/csvtk/split/nextflow.config b/tests/modules/csvtk/split/nextflow.config new file mode 100644 index 00000000..1dbd7615 --- /dev/null +++ b/tests/modules/csvtk/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CSVTK_SPLIT { + ext.args = "-C \'&\' --fields \'first_name\' " + } + +} diff --git a/tests/modules/csvtk/split/test.yml b/tests/modules/csvtk/split/test.yml new file mode 100644 index 00000000..bd13cca6 --- /dev/null +++ b/tests/modules/csvtk/split/test.yml @@ -0,0 +1,25 @@ +- name: csvtk split test_csvtk_split_tsv + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_tsv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.tsv + md5sum: 589a2add7f0b8e998d4959e5d883e7d5 + - path: output/csvtk/test-Rob.tsv + md5sum: 6c5555d689c4e685d35d6e394ad6e1e6 + - path: output/csvtk/test-Robert.tsv + md5sum: 45ae6da8111096746d1736d34220a3ec + +- name: csvtk split test_csvtk_split_csv + command: nextflow run ./tests/modules/csvtk/split -entry test_csvtk_split_csv -c ./tests/config/nextflow.config -c ./tests/modules/csvtk/split/nextflow.config + tags: + - csvtk/split + - csvtk + files: + - path: output/csvtk/test-Ken.csv + md5sum: 71a931dae6f15f5ddb0318c7d4afe81e + - path: output/csvtk/test-Rob.csv + md5sum: efc4bc507021043a3bf2fb0724c4a216 + - path: output/csvtk/test-Robert.csv + md5sum: 
8de2f076e64252c2abed69b9c2a3a386 diff --git a/tests/modules/custom/dumpsoftwareversions/main.nf b/tests/modules/custom/dumpsoftwareversions/main.nf index 020b19bd..95a43a82 100644 --- a/tests/modules/custom/dumpsoftwareversions/main.nf +++ b/tests/modules/custom/dumpsoftwareversions/main.nf @@ -2,23 +2,54 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from '../../../../modules/multiqc/main.nf' addParams( options: [:] ) -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' addParams( options: [publish_dir:'custom'] ) +include { FASTQC } from '../../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../../modules/multiqc/main.nf' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../../../../modules/custom/dumpsoftwareversions/main.nf' + +workflow fastqc1 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions +} + +workflow fastqc2 { + take: + input + + main: + FASTQC ( input ) + + emit: + versions = FASTQC.out.versions + zip = FASTQC.out.zip +} workflow test_custom_dumpsoftwareversions { input = [ [ id: 'test', single_end: false ], - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] - FASTQC ( input ) - MULTIQC ( FASTQC.out.zip.collect { it[1] } ) + // Using subworkflows to ensure that the script can properly handle + // cases where subworkflows have a module with the same name. 
+ fastqc1 ( input ) + fastqc2 ( input ) + MULTIQC ( fastqc2.out.zip.collect { it[1] } ) - ch_software_versions = Channel.empty() - ch_software_versions = ch_software_versions.mix(FASTQC.out.versions) - ch_software_versions = ch_software_versions.mix(MULTIQC.out.versions) + fastqc1 + .out + .versions + .mix(fastqc2.out.versions) + .mix(MULTIQC.out.versions) + .set { ch_software_versions } CUSTOM_DUMPSOFTWAREVERSIONS ( ch_software_versions.collectFile() ) } diff --git a/tests/modules/custom/dumpsoftwareversions/nextflow.config b/tests/modules/custom/dumpsoftwareversions/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/dumpsoftwareversions/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/dumpsoftwareversions/test.yml b/tests/modules/custom/dumpsoftwareversions/test.yml index 1815c0ba..363a1218 100644 --- a/tests/modules/custom/dumpsoftwareversions/test.yml +++ b/tests/modules/custom/dumpsoftwareversions/test.yml @@ -1,8 +1,14 @@ - name: custom dumpsoftwareversions - command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c tests/config/nextflow.config + command: nextflow run ./tests/modules/custom/dumpsoftwareversions -entry test_custom_dumpsoftwareversions -c ./tests/config/nextflow.config -c ./tests/modules/custom/dumpsoftwareversions/nextflow.config tags: - custom - custom/dumpsoftwareversions files: - path: output/custom/software_versions.yml + contains: + - FASTQC + - MULTIQC + must_not_contain: + - fastqc1 + - fastqc2 - path: output/custom/software_versions_mqc.yml diff --git a/tests/modules/custom/getchromsizes/main.nf b/tests/modules/custom/getchromsizes/main.nf new file mode 100644 index 00000000..b4f9fb9f --- /dev/null +++ b/tests/modules/custom/getchromsizes/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + 
+nextflow.enable.dsl = 2 + +include { CUSTOM_GETCHROMSIZES } from '../../../../modules/custom/getchromsizes/main.nf' + +workflow test_custom_getchromsizes { + + input = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + CUSTOM_GETCHROMSIZES ( input ) +} diff --git a/tests/modules/custom/getchromsizes/nextflow.config b/tests/modules/custom/getchromsizes/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/custom/getchromsizes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/custom/getchromsizes/test.yml b/tests/modules/custom/getchromsizes/test.yml new file mode 100644 index 00000000..9a770ad4 --- /dev/null +++ b/tests/modules/custom/getchromsizes/test.yml @@ -0,0 +1,10 @@ +- name: custom getchromsizes + command: nextflow run ./tests/modules/custom/getchromsizes -entry test_custom_getchromsizes -c ./tests/config/nextflow.config -c ./tests/modules/custom/getchromsizes/nextflow.config + tags: + - custom + - custom/getchromsizes + files: + - path: output/custom/genome.fasta.fai + md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/custom/genome.fasta.sizes + md5sum: a57c401f27ae5133823fb09fb21c8a3c diff --git a/tests/modules/cutadapt/main.nf b/tests/modules/cutadapt/main.nf index 8e060398..a47feebb 100644 --- a/tests/modules/cutadapt/main.nf +++ b/tests/modules/cutadapt/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { CUTADAPT } from '../../../modules/cutadapt/main.nf' addParams( options: [ args:'-q 25' ] ) +include { CUTADAPT } from '../../../modules/cutadapt/main.nf' // // Test with single-end data diff --git a/tests/modules/cutadapt/nextflow.config b/tests/modules/cutadapt/nextflow.config new file mode 100644 index 00000000..2af532cc --- /dev/null +++ b/tests/modules/cutadapt/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CUTADAPT { + ext.args = '-q 25' + } + +} diff --git a/tests/modules/cutadapt/test.yml b/tests/modules/cutadapt/test.yml index 40710dc5..6fa0eb4f 100644 --- a/tests/modules/cutadapt/test.yml +++ b/tests/modules/cutadapt/test.yml @@ -1,5 +1,5 @@ - name: cutadapt single-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_single_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: @@ -7,7 +7,7 @@ - path: ./output/cutadapt/test.trim.fastq.gz - name: cutadapt paired-end - command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/cutadapt -entry test_cutadapt_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/cutadapt/nextflow.config tags: - cutadapt files: diff --git a/tests/modules/damageprofiler/main.nf b/tests/modules/damageprofiler/main.nf index 36ae7b24..9207caf1 100644 --- a/tests/modules/damageprofiler/main.nf +++ b/tests/modules/damageprofiler/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' addParams( options: [:] ) +include { DAMAGEPROFILER } from '../../../modules/damageprofiler/main.nf' workflow test_damageprofiler { diff --git a/tests/modules/damageprofiler/nextflow.config b/tests/modules/damageprofiler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/damageprofiler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/damageprofiler/test.yml b/tests/modules/damageprofiler/test.yml index 9ef964dc..4a560ce1 100644 --- 
a/tests/modules/damageprofiler/test.yml +++ b/tests/modules/damageprofiler/test.yml @@ -1,5 +1,5 @@ - name: damageprofiler - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -36,7 +36,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_reference - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_reference -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: @@ -73,7 +73,7 @@ md5sum: bec0c5fc2fa9c82b04949e2d8b6e979c - name: damageprofiler_specieslist - command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c tests/config/nextflow.config -dump-channels + command: nextflow run ./tests/modules/damageprofiler -entry test_damageprofiler_specieslist -c ./tests/config/nextflow.config -dump-channels -c ./tests/modules/damageprofiler/nextflow.config tags: - damageprofiler files: diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf new file mode 100644 index 00000000..f6f6becf --- /dev/null +++ b/tests/modules/dastool/dastool/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' +include { DASTOOL_DASTOOL } from 
'../../../../modules/dastool/dastool/main.nf' + +workflow test_dastool_dastool { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") + + Channel.of([ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)]) + .join(DASTOOL_SCAFFOLDS2BIN.out.scaffolds2bin) + .set {input_dastool} + + + DASTOOL_DASTOOL ( input_dastool, [], [], [] ) +} diff --git a/tests/modules/dastool/dastool/nextflow.config b/tests/modules/dastool/dastool/nextflow.config new file mode 100644 index 00000000..e306b4b4 --- /dev/null +++ b/tests/modules/dastool/dastool/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + + withName: DASTOOL_DASTOOL { + ext.args = '--score_threshold 0 --debug' + } + +} diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml new file mode 100644 index 00000000..e2161890 --- /dev/null +++ b/tests/modules/dastool/dastool/test.yml @@ -0,0 +1,29 @@ +- name: dastool dastool test_dastool_dastool + command: nextflow run ./tests/modules/dastool/dastool -entry test_dastool_dastool -c 
./tests/config/nextflow.config -c ./tests/modules/dastool/dastool/nextflow.config + tags: + - dastool + - dastool/dastool + files: + - path: output/dastool/test.seqlength + md5sum: b815a5811008c36808a59b1d0dcfab24 + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool.log + contains: + - 'DAS Tool run on' + - path: output/dastool/test_DASTool_scaffolds2bin.txt + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/test_DASTool_summary.txt + md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/dastool/test_proteins.faa.archaea.scg + md5sum: e79d82eecee25821d1658ea4f082601d + - path: output/dastool/test_proteins.faa.bacteria.scg + md5sum: 8132cfb17cf398d41c036ead55c96ffe + - path: output/dastool/test_test.tsv.eval + md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 diff --git a/tests/modules/dastool/scaffolds2bin/main.nf b/tests/modules/dastool/scaffolds2bin/main.nf new file mode 100644 index 00000000..a0cd6726 --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' + +workflow test_dastool_scaffolds2bin { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], 
checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") +} \ No newline at end of file diff --git a/tests/modules/dastool/scaffolds2bin/nextflow.config b/tests/modules/dastool/scaffolds2bin/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml new file mode 100644 index 00000000..26f528c9 --- /dev/null +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -0,0 +1,14 @@ +- name: dastool scaffolds2bin test_dastool_scaffolds2bin + command: nextflow run ./tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c ./tests/config/nextflow.config -c ./tests/modules/dastool/scaffolds2bin/nextflow.config + tags: + - dastool + - dastool/scaffolds2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 2b297bf557cc3831b800348859331268 + - path: output/metabat2/test.tsv.gz + md5sum: 619338fa5019e361d5545ce385a6961f + - path: output/metabat2/test.txt.gz + md5sum: 745a0446af6ef68b930975e9ce5a95d6 diff --git a/tests/modules/dedup/main.nf b/tests/modules/dedup/main.nf index 37e8e5c2..4a397eaa 100644 --- a/tests/modules/dedup/main.nf +++ b/tests/modules/dedup/main.nf 
@@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEDUP } from '../../../modules/dedup/main.nf' addParams( options: [args: "-m"] ) +include { DEDUP } from '../../../modules/dedup/main.nf' workflow test_dedup { diff --git a/tests/modules/dedup/nextflow.config b/tests/modules/dedup/nextflow.config new file mode 100644 index 00000000..80a42463 --- /dev/null +++ b/tests/modules/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DEDUP { + ext.args = '-m' + } + +} diff --git a/tests/modules/dedup/test.yml b/tests/modules/dedup/test.yml index b35cfafd..077aac0d 100644 --- a/tests/modules/dedup/test.yml +++ b/tests/modules/dedup/test.yml @@ -1,5 +1,5 @@ - name: dedup test_dedup - command: nextflow run tests/modules/dedup -entry test_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dedup -entry test_dedup -c ./tests/config/nextflow.config -c ./tests/modules/dedup/nextflow.config tags: - dedup files: diff --git a/tests/modules/deeptools/computematrix/main.nf b/tests/modules/deeptools/computematrix/main.nf index 116bc851..35e49f59 100644 --- a/tests/modules/deeptools/computematrix/main.nf +++ b/tests/modules/deeptools/computematrix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' addParams( options: ['args' : 'scale-regions -b 1000'] ) +include { DEEPTOOLS_COMPUTEMATRIX } from '../../../../modules/deeptools/computematrix/main.nf' workflow test_deeptools_computematrix { diff --git a/tests/modules/deeptools/computematrix/nextflow.config b/tests/modules/deeptools/computematrix/nextflow.config new file mode 100644 index 00000000..285b2165 --- /dev/null +++ b/tests/modules/deeptools/computematrix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + 
+ withName: DEEPTOOLS_COMPUTEMATRIX { + ext.args = 'scale-regions -b 1000' + } + +} diff --git a/tests/modules/deeptools/computematrix/test.yml b/tests/modules/deeptools/computematrix/test.yml index fb2fa9e1..88657de3 100644 --- a/tests/modules/deeptools/computematrix/test.yml +++ b/tests/modules/deeptools/computematrix/test.yml @@ -1,5 +1,5 @@ - name: deeptools computematrix - command: nextflow run tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/computematrix -entry test_deeptools_computematrix -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/computematrix/nextflow.config tags: - deeptools - deeptools/computematrix diff --git a/tests/modules/deeptools/plotfingerprint/main.nf b/tests/modules/deeptools/plotfingerprint/main.nf index e84adc39..bcef970e 100644 --- a/tests/modules/deeptools/plotfingerprint/main.nf +++ b/tests/modules/deeptools/plotfingerprint/main.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl = 2 params.fragment_size = 1000 -include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTFINGERPRINT } from '../../../../modules/deeptools/plotfingerprint/main.nf' workflow test_deeptools_plotfingerprint { diff --git a/tests/modules/deeptools/plotfingerprint/nextflow.config b/tests/modules/deeptools/plotfingerprint/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotfingerprint/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotfingerprint/test.yml b/tests/modules/deeptools/plotfingerprint/test.yml index b7803a6e..11d4ae7b 100644 --- a/tests/modules/deeptools/plotfingerprint/test.yml +++ b/tests/modules/deeptools/plotfingerprint/test.yml @@ -1,5 +1,5 @@ - name: 
deeptools plotfingerprint - command: nextflow run tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotfingerprint -entry test_deeptools_plotfingerprint -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotfingerprint/nextflow.config tags: - deeptools - deeptools/plotfingerprint diff --git a/tests/modules/deeptools/plotheatmap/main.nf b/tests/modules/deeptools/plotheatmap/main.nf index 93e7d373..86005b2c 100644 --- a/tests/modules/deeptools/plotheatmap/main.nf +++ b/tests/modules/deeptools/plotheatmap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTHEATMAP } from '../../../../modules/deeptools/plotheatmap/main.nf' workflow test_deeptools_plotheatmap { diff --git a/tests/modules/deeptools/plotheatmap/nextflow.config b/tests/modules/deeptools/plotheatmap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotheatmap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotheatmap/test.yml b/tests/modules/deeptools/plotheatmap/test.yml index 641d5121..9273f840 100644 --- a/tests/modules/deeptools/plotheatmap/test.yml +++ b/tests/modules/deeptools/plotheatmap/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotheatmap - command: nextflow run tests/modules/deeptools/plotheatmap -entry test_deeptools_plotheatmap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotheatmap -entry test_deeptools_plotheatmap -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotheatmap/nextflow.config tags: - deeptools - deeptools/plotheatmap diff --git a/tests/modules/deeptools/plotprofile/main.nf 
b/tests/modules/deeptools/plotprofile/main.nf index ac91f0c5..63ee47cd 100644 --- a/tests/modules/deeptools/plotprofile/main.nf +++ b/tests/modules/deeptools/plotprofile/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' addParams( options: [:] ) +include { DEEPTOOLS_PLOTPROFILE } from '../../../../modules/deeptools/plotprofile/main.nf' workflow test_deeptools_plotprofile { diff --git a/tests/modules/deeptools/plotprofile/nextflow.config b/tests/modules/deeptools/plotprofile/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/deeptools/plotprofile/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/deeptools/plotprofile/test.yml b/tests/modules/deeptools/plotprofile/test.yml index efe02ce5..4b6c5b9a 100644 --- a/tests/modules/deeptools/plotprofile/test.yml +++ b/tests/modules/deeptools/plotprofile/test.yml @@ -1,5 +1,5 @@ - name: deeptools plotprofile - command: nextflow run tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/deeptools/plotprofile -entry test_deeptools_plotprofile -c ./tests/config/nextflow.config -c ./tests/modules/deeptools/plotprofile/nextflow.config tags: - deeptools - deeptools/plotprofile diff --git a/tests/modules/delly/call/main.nf b/tests/modules/delly/call/main.nf index f41dda95..f4583e05 100644 --- a/tests/modules/delly/call/main.nf +++ b/tests/modules/delly/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' addParams( options: [:] ) +include { DELLY_CALL } from '../../../../modules/delly/call/main.nf' workflow test_delly_call { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/delly/call/nextflow.config 
b/tests/modules/delly/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/delly/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/delly/call/test.yml b/tests/modules/delly/call/test.yml index d8750892..a770d213 100644 --- a/tests/modules/delly/call/test.yml +++ b/tests/modules/delly/call/test.yml @@ -1,10 +1,9 @@ - name: delly call test_delly_call - command: nextflow run tests/modules/delly/call -entry test_delly_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/delly/call -entry test_delly_call -c ./tests/config/nextflow.config -c ./tests/modules/delly/call/nextflow.config tags: - delly - delly/call files: - path: output/delly/test.bcf - md5sum: 360c1bf6867f33bd2a868ddfb4d957fc - path: output/delly/test.bcf.csi md5sum: 19e0cdf06c415f4942f6d4dbd5fb7271 diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index ab131a86..87d05bf9 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' addParams( options: [ suffix: '.diamond_blastp' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' workflow test_diamond_blastp { diff --git a/tests/modules/diamond/blastp/nextflow.config b/tests/modules/diamond/blastp/nextflow.config new file mode 100644 index 00000000..5a9aacad --- /dev/null +++ b/tests/modules/diamond/blastp/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + 
withName: DIAMOND_BLASTP { + ext.prefix = { "${meta.id}.diamond_blastp" } + } + +} diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index ae62ea51..673563cb 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -1,5 +1,5 @@ - name: diamond blastp - command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastp/nextflow.config tags: - diamond - diamond/blastp diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index c0e437d7..77eb08ea 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) -include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' addParams( options: [ suffix: '.diamond_blastx' ] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' +include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' workflow test_diamond_blastx { diff --git a/tests/modules/diamond/blastx/nextflow.config b/tests/modules/diamond/blastx/nextflow.config new file mode 100644 index 00000000..25320af3 --- /dev/null +++ b/tests/modules/diamond/blastx/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DIAMOND_BLASTX { + ext.prefix = { "${meta.id}.diamond_blastx" } + } + +} diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index 91a6eb4f..ee94802f 100644 --- a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -1,5 +1,5 @@ - name: diamond blastx 
- command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastx/nextflow.config tags: - diamond - diamond/blastx diff --git a/tests/modules/diamond/makedb/main.nf b/tests/modules/diamond/makedb/main.nf index bcd7691e..70982ae9 100644 --- a/tests/modules/diamond/makedb/main.nf +++ b/tests/modules/diamond/makedb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' addParams( options: [:] ) +include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' workflow test_diamond_makedb { diff --git a/tests/modules/diamond/makedb/nextflow.config b/tests/modules/diamond/makedb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/diamond/makedb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/diamond/makedb/test.yml b/tests/modules/diamond/makedb/test.yml index 335b571f..c8f2d79e 100644 --- a/tests/modules/diamond/makedb/test.yml +++ b/tests/modules/diamond/makedb/test.yml @@ -1,5 +1,5 @@ - name: diamond makedb test_diamond_makedb - command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c ./tests/config/nextflow.config -c ./tests/modules/diamond/makedb/nextflow.config tags: - diamond - diamond/makedb diff --git a/tests/modules/dragmap/align/main.nf b/tests/modules/dragmap/align/main.nf new file mode 100644 index 00000000..4376602c --- /dev/null +++ b/tests/modules/dragmap/align/main.nf @@ -0,0 +1,60 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } 
from '../../../../modules/dragmap/hashtable/main.nf' +include { DRAGMAP_ALIGN } from '../../../../modules/dragmap/align/main.nf' + +workflow test_dragmap_align_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_single_end_sort { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) +} + +workflow test_dragmap_align_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, false ) +} + +workflow test_dragmap_align_paired_end_sort { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) + DRAGMAP_ALIGN ( input, DRAGMAP_HASHTABLE.out.hashmap, true ) +} diff --git a/tests/modules/dragmap/align/nextflow.config b/tests/modules/dragmap/align/nextflow.config 
new file mode 100644 index 00000000..b968c357 --- /dev/null +++ b/tests/modules/dragmap/align/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DRAGMAP_ALIGN { + ext.args2 = { sort_bam ? "" : "-bh" } + } + +} diff --git a/tests/modules/dragmap/align/test.yml b/tests/modules/dragmap/align/test.yml new file mode 100644 index 00000000..b0196e55 --- /dev/null +++ b/tests/modules/dragmap/align/test.yml @@ -0,0 +1,35 @@ +- name: dragmap align single-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + +- name: dragmap align single-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_single_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log + +- name: dragmap align paired-end_sort + command: nextflow run ./tests/modules/dragmap/align -entry test_dragmap_align_paired_end_sort -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/align/nextflow.config + tags: + - dragmap + - dragmap/align + files: + - path: output/dragmap/test.bam + - path: output/dragmap/test.dragmap.log diff --git a/tests/modules/dragmap/hashtable/main.nf b/tests/modules/dragmap/hashtable/main.nf new file mode 100644 
index 00000000..91b43caa --- /dev/null +++ b/tests/modules/dragmap/hashtable/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { DRAGMAP_HASHTABLE } from '../../../../modules/dragmap/hashtable/main.nf' + +workflow test_dragmap_hashtable { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + DRAGMAP_HASHTABLE ( fasta ) +} + +// TODO Add test using alt-masked bed file +// https://github.com/Illumina/dragmap#build-hash-table-using-an-alt-masked-bed-file diff --git a/tests/modules/dragmap/hashtable/nextflow.config b/tests/modules/dragmap/hashtable/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/dragmap/hashtable/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dragmap/hashtable/test.yml b/tests/modules/dragmap/hashtable/test.yml new file mode 100644 index 00000000..59a3ed55 --- /dev/null +++ b/tests/modules/dragmap/hashtable/test.yml @@ -0,0 +1,19 @@ +- name: dragmap hashtable + command: nextflow run ./tests/modules/dragmap/hashtable -entry test_dragmap_hashtable -c ./tests/config/nextflow.config -c ./tests/modules/dragmap/hashtable/nextflow.config + tags: + - dragmap + - dragmap/hashtable + files: + - path: output/dragmap/dragmap/hash_table.cfg + - path: output/dragmap/dragmap/hash_table.cfg.bin + - path: output/dragmap/dragmap/hash_table.cmp + md5sum: bc210e5358fd65656f9aea297b59ec7d + - path: output/dragmap/dragmap/hash_table_stats.txt + - path: output/dragmap/dragmap/reference.bin + md5sum: b6b5c12a42416b990cd2844de8f33c5d + - path: output/dragmap/dragmap/ref_index.bin + md5sum: 8470be9566ecee77eb4aea6a38922a66 + - path: output/dragmap/dragmap/repeat_mask.bin + md5sum: 2439259a2fd32a1d0f4c53d585f3da3a + - path: output/dragmap/dragmap/str_table.bin + md5sum: 
302e2b30993973527e69c6bcd1f093d0 diff --git a/tests/modules/dragonflye/main.nf b/tests/modules/dragonflye/main.nf index 4d3ac6e5..3d59bb21 100644 --- a/tests/modules/dragonflye/main.nf +++ b/tests/modules/dragonflye/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler miniasm --gsize 5000000'] ) -include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' addParams( options: [args: '--assembler raven --gsize 5000000'] ) +include { DRAGONFLYE } from '../../../modules/dragonflye/main.nf' +include { DRAGONFLYE as DRAGONFLYE_RAVEN } from '../../../modules/dragonflye/main.nf' workflow test_dragonflye { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/dragonflye/nextflow.config b/tests/modules/dragonflye/nextflow.config new file mode 100644 index 00000000..fea43da4 --- /dev/null +++ b/tests/modules/dragonflye/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DRAGONFLYE { + ext.args = '--assembler miniasm --gsize 5000000' + } + + withName: DRAGONFLYE_RAVEN { + ext.args = '--assembler raven --gsize 5000000' + } + +} diff --git a/tests/modules/dragonflye/test.yml b/tests/modules/dragonflye/test.yml index fe6283c0..ef9121ba 100644 --- a/tests/modules/dragonflye/test.yml +++ b/tests/modules/dragonflye/test.yml @@ -1,5 +1,5 @@ - name: dragonflye with miniasm - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: @@ -12,7 +12,7 @@ - path: output/dragonflye/dragonflye.log - name: dragonflye with raven - command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/dragonflye -entry test_dragonflye_raven -c ./tests/config/nextflow.config -c ./tests/modules/dragonflye/nextflow.config tags: - dragonflye files: diff --git a/tests/modules/dshbio/exportsegments/main.nf b/tests/modules/dshbio/exportsegments/main.nf index 6eef1046..c213dc54 100644 --- a/tests/modules/dshbio/exportsegments/main.nf +++ b/tests/modules/dshbio/exportsegments/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' addParams( options: [:] ) +include { DSHBIO_EXPORTSEGMENTS } from '../../../../modules/dshbio/exportsegments/main.nf' workflow test_dshbio_exportsegments { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/exportsegments/nextflow.config b/tests/modules/dshbio/exportsegments/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/dshbio/exportsegments/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/dshbio/exportsegments/test.yml b/tests/modules/dshbio/exportsegments/test.yml index 453e1cba..c811df03 100644 --- a/tests/modules/dshbio/exportsegments/test.yml +++ b/tests/modules/dshbio/exportsegments/test.yml @@ -1,5 +1,5 @@ - name: dshbio exportsegments - command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/exportsegments -entry test_dshbio_exportsegments -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/exportsegments/nextflow.config tags: - dshbio - dshbio/exportsegments diff --git a/tests/modules/dshbio/filterbed/main.nf b/tests/modules/dshbio/filterbed/main.nf index 722c88d2..454a03be 100644 --- a/tests/modules/dshbio/filterbed/main.nf +++ 
b/tests/modules/dshbio/filterbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' addParams( options: [suffix: '.filtered', args: '--range chr1:0-1000'] ) +include { DSHBIO_FILTERBED } from '../../../../modules/dshbio/filterbed/main.nf' workflow test_dshbio_filterbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/filterbed/nextflow.config b/tests/modules/dshbio/filterbed/nextflow.config new file mode 100644 index 00000000..3937a184 --- /dev/null +++ b/tests/modules/dshbio/filterbed/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERBED { + ext.args = '--range chr1:0-1000' + ext.prefix = { "${meta.id}.filtered" } + } +} diff --git a/tests/modules/dshbio/filterbed/test.yml b/tests/modules/dshbio/filterbed/test.yml index ad1cde66..278fd5a3 100644 --- a/tests/modules/dshbio/filterbed/test.yml +++ b/tests/modules/dshbio/filterbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio filterbed - command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filterbed -entry test_dshbio_filterbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filterbed/nextflow.config tags: - dshbio - dshbio/filterbed diff --git a/tests/modules/dshbio/filtergff3/main.nf b/tests/modules/dshbio/filtergff3/main.nf index 3156d091..7c803781 100644 --- a/tests/modules/dshbio/filtergff3/main.nf +++ b/tests/modules/dshbio/filtergff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_FILTERGFF3 } from '../../../../modules/dshbio/filtergff3/main.nf' addParams( options: [suffix: '.filtered', args: '--range MT192765.1:0-1000'] ) +include { DSHBIO_FILTERGFF3 } from '../../../../modules/dshbio/filtergff3/main.nf' workflow test_dshbio_filtergff3 { input = [ [ 
id:'test' ], // meta map diff --git a/tests/modules/dshbio/filtergff3/nextflow.config b/tests/modules/dshbio/filtergff3/nextflow.config new file mode 100644 index 00000000..80dcd28c --- /dev/null +++ b/tests/modules/dshbio/filtergff3/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_FILTERGFF3 { + ext.args = '--range MT192765.1:0-1000' + ext.prefix = { "${meta.id}.filtered" } + } + +} diff --git a/tests/modules/dshbio/filtergff3/test.yml b/tests/modules/dshbio/filtergff3/test.yml index 95d1b446..43238333 100644 --- a/tests/modules/dshbio/filtergff3/test.yml +++ b/tests/modules/dshbio/filtergff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio filtergff3 - command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/filtergff3 -entry test_dshbio_filtergff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/filtergff3/nextflow.config tags: - dshbio - dshbio/filtergff3 diff --git a/tests/modules/dshbio/splitbed/main.nf b/tests/modules/dshbio/splitbed/main.nf index d7f3d004..517baad0 100644 --- a/tests/modules/dshbio/splitbed/main.nf +++ b/tests/modules/dshbio/splitbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' addParams( options: [suffix: '.', args: '--records 2'] ) +include { DSHBIO_SPLITBED } from '../../../../modules/dshbio/splitbed/main.nf' workflow test_dshbio_splitbed { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitbed/nextflow.config b/tests/modules/dshbio/splitbed/nextflow.config new file mode 100644 index 00000000..ad9c045b --- /dev/null +++ b/tests/modules/dshbio/splitbed/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + 
withName: DSHBIO_SPLITBED { + ext.prefix = { "${meta.id}." } + ext.args = '--records 2' + } + +} diff --git a/tests/modules/dshbio/splitbed/test.yml b/tests/modules/dshbio/splitbed/test.yml index 04f5b150..ab14648e 100644 --- a/tests/modules/dshbio/splitbed/test.yml +++ b/tests/modules/dshbio/splitbed/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitbed - command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitbed -entry test_dshbio_splitbed -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitbed/nextflow.config tags: - dshbio - dshbio/splitbed diff --git a/tests/modules/dshbio/splitgff3/main.nf b/tests/modules/dshbio/splitgff3/main.nf index dd58201a..03aa5394 100644 --- a/tests/modules/dshbio/splitgff3/main.nf +++ b/tests/modules/dshbio/splitgff3/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' addParams( options: [suffix: '.', args: '--records 15'] ) +include { DSHBIO_SPLITGFF3 } from '../../../../modules/dshbio/splitgff3/main.nf' workflow test_dshbio_splitgff3 { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/dshbio/splitgff3/nextflow.config b/tests/modules/dshbio/splitgff3/nextflow.config new file mode 100644 index 00000000..f6a0b921 --- /dev/null +++ b/tests/modules/dshbio/splitgff3/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: DSHBIO_SPLITGFF3 { + ext.prefix = { "${meta.id}." 
} + ext.args = '--records 15' + } + +} diff --git a/tests/modules/dshbio/splitgff3/test.yml b/tests/modules/dshbio/splitgff3/test.yml index fe5b1bed..6087ce11 100644 --- a/tests/modules/dshbio/splitgff3/test.yml +++ b/tests/modules/dshbio/splitgff3/test.yml @@ -1,5 +1,5 @@ - name: dshbio splitgff3 - command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/dshbio/splitgff3 -entry test_dshbio_splitgff3 -c ./tests/config/nextflow.config -c ./tests/modules/dshbio/splitgff3/nextflow.config tags: - dshbio - dshbio/splitgff3 diff --git a/tests/modules/ectyper/main.nf b/tests/modules/ectyper/main.nf new file mode 100644 index 00000000..dd359fa2 --- /dev/null +++ b/tests/modules/ectyper/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ECTYPER } from '../../../modules/ectyper/main.nf' + +workflow test_ectyper { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + ECTYPER ( input ) +} diff --git a/tests/modules/ectyper/nextflow.config b/tests/modules/ectyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ectyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ectyper/test.yml b/tests/modules/ectyper/test.yml new file mode 100644 index 00000000..4f909bd9 --- /dev/null +++ b/tests/modules/ectyper/test.yml @@ -0,0 +1,11 @@ +- name: ectyper test_ectyper + command: nextflow run ./tests/modules/ectyper -entry test_ectyper -c ./tests/config/nextflow.config -c ./tests/modules/ectyper/nextflow.config + tags: + - ectyper + files: + - path: output/ectyper/blast_output_alleles.txt + md5sum: 27f3f5e84f7da451b2948d61589cdb06 + - path: output/ectyper/ectyper.log + contains: 
['Serotype', 'RefSeq', 'O-type', 'finished'] + - path: output/ectyper/test.tsv + md5sum: ba923d7c7ee7d1047466aafc9a9df208 diff --git a/tests/modules/emmtyper/main.nf b/tests/modules/emmtyper/main.nf new file mode 100644 index 00000000..ee96fc32 --- /dev/null +++ b/tests/modules/emmtyper/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { EMMTYPER } from '../../../modules/emmtyper/main.nf' + +workflow test_emmtyper { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + EMMTYPER ( input ) +} diff --git a/tests/modules/emmtyper/nextflow.config b/tests/modules/emmtyper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/emmtyper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/emmtyper/test.yml b/tests/modules/emmtyper/test.yml new file mode 100644 index 00000000..81854eb6 --- /dev/null +++ b/tests/modules/emmtyper/test.yml @@ -0,0 +1,7 @@ +- name: emmtyper test_emmtyper + command: nextflow run ./tests/modules/emmtyper -entry test_emmtyper -c ./tests/config/nextflow.config -c ./tests/modules/emmtyper/nextflow.config + tags: + - emmtyper + files: + - path: output/emmtyper/test.tsv + md5sum: c727ba859adec9ca8ff0e091ecf79c62 diff --git a/tests/modules/ensemblvep/main.nf b/tests/modules/ensemblvep/main.nf index 3cbb26f1..223847c7 100644 --- a/tests/modules/ensemblvep/main.nf +++ b/tests/modules/ensemblvep/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' addParams( vep_tag: '104.3.WBcel235', use_cache: false ) +include { ENSEMBLVEP } from '../../../modules/ensemblvep/main.nf' workflow test_ensemblvep { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], 
checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) } diff --git a/tests/modules/ensemblvep/nextflow.config b/tests/modules/ensemblvep/nextflow.config new file mode 100644 index 00000000..f13d62e9 --- /dev/null +++ b/tests/modules/ensemblvep/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + container = 'nfcore/vep:104.3.WBcel235' + } + +} diff --git a/tests/modules/ensemblvep/test.yml b/tests/modules/ensemblvep/test.yml index a6e33cae..42384d6e 100644 --- a/tests/modules/ensemblvep/test.yml +++ b/tests/modules/ensemblvep/test.yml @@ -1,5 +1,5 @@ - name: ensemblvep test_ensemblvep - command: nextflow run tests/modules/ensemblvep -entry test_ensemblvep -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ensemblvep -entry test_ensemblvep -c ./tests/config/nextflow.config -c ./tests/modules/ensemblvep/nextflow.config tags: - ensemblvep files: diff --git a/tests/modules/expansionhunter/main.nf b/tests/modules/expansionhunter/main.nf index a7acbff4..91faeeb8 100644 --- a/tests/modules/expansionhunter/main.nf +++ b/tests/modules/expansionhunter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' addParams( options: [:] ) +include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' workflow test_expansionhunter { diff --git a/tests/modules/expansionhunter/nextflow.config b/tests/modules/expansionhunter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/expansionhunter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff 
--git a/tests/modules/expansionhunter/test.yml b/tests/modules/expansionhunter/test.yml index 78d5c002..19403588 100644 --- a/tests/modules/expansionhunter/test.yml +++ b/tests/modules/expansionhunter/test.yml @@ -1,5 +1,5 @@ - name: expansionhunter test_expansionhunter - command: nextflow run tests/modules/expansionhunter -entry test_expansionhunter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/expansionhunter -entry test_expansionhunter -c ./tests/config/nextflow.config -c ./tests/modules/expansionhunter/nextflow.config tags: - expansionhunter files: diff --git a/tests/modules/fargene/main.nf b/tests/modules/fargene/main.nf new file mode 100644 index 00000000..471862e1 --- /dev/null +++ b/tests/modules/fargene/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GUNZIP } from '../../../modules/gunzip/main.nf' +include { FARGENE } from '../../../modules/fargene/main.nf' + +workflow test_fargene { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) ] + hmm_model = 'class_a' + + GUNZIP ( input ) + FARGENE ( GUNZIP.out.gunzip, hmm_model ) +} diff --git a/tests/modules/fargene/nextflow.config b/tests/modules/fargene/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fargene/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fargene/test.yml b/tests/modules/fargene/test.yml new file mode 100644 index 00000000..d97e2257 --- /dev/null +++ b/tests/modules/fargene/test.yml @@ -0,0 +1,14 @@ +- name: fargene + command: nextflow run ./tests/modules/fargene -entry test_fargene -c ./tests/config/nextflow.config -c ./tests/modules/fargene/nextflow.config + tags: + - fargene + files: + - path: output/fargene/fargene_analysis.log + - 
path: output/fargene/test/hmmsearchresults/test1.contigs-class_A-hmmsearched.out + - path: output/fargene/test/results_summary.txt + md5sum: 690d351cfc52577263ef4cfab1c81f50 + - path: output/fargene/test/tmpdir/test1.contigs-positives.out + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/fargene/test/tmpdir/tmp.out + - path: output/gunzip/test1.contigs.fa + md5sum: 80c4d78f2810f6d9e90fa6da9bb9c4f9 diff --git a/tests/modules/fastani/main.nf b/tests/modules/fastani/main.nf index a5548e20..0395f6a9 100644 --- a/tests/modules/fastani/main.nf +++ b/tests/modules/fastani/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTANI } from '../../../modules/fastani/main.nf' addParams( options: [:] ) +include { FASTANI } from '../../../modules/fastani/main.nf' workflow test_fastani { diff --git a/tests/modules/fastani/nextflow.config b/tests/modules/fastani/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastani/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastani/test.yml b/tests/modules/fastani/test.yml index cd411d06..f3748d25 100644 --- a/tests/modules/fastani/test.yml +++ b/tests/modules/fastani/test.yml @@ -1,5 +1,5 @@ - name: fastani - command: nextflow run ./tests/modules/fastani -entry test_fastani -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastani -entry test_fastani -c ./tests/config/nextflow.config -c ./tests/modules/fastani/nextflow.config tags: - fastani files: diff --git a/tests/modules/fastp/main.nf b/tests/modules/fastp/main.nf index c8e5112f..d1540974 100644 --- a/tests/modules/fastp/main.nf +++ b/tests/modules/fastp/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTP } from '../../../modules/fastp/main.nf' addParams( options: [:] ) +include { FASTP } from '../../../modules/fastp/main.nf' // // Test with single-end data 
diff --git a/tests/modules/fastp/nextflow.config b/tests/modules/fastp/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastp/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastp/test.yml b/tests/modules/fastp/test.yml index 365ce025..cd7ddeed 100644 --- a/tests/modules/fastp/test.yml +++ b/tests/modules/fastp/test.yml @@ -1,5 +1,5 @@ - name: fastp test_fastp_single_end - command: nextflow run tests/modules/fastp -entry test_fastp_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -17,7 +17,7 @@ md5sum: e0d856ebb3da9e4462c3ce9683efe01d - name: fastp test_fastp_paired_end - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -38,7 +38,7 @@ md5sum: 9eff7203596580cc5e42aceab4a469df - name: fastp test_fastp_single_end_trim_fail - command: nextflow run tests/modules/fastp -entry test_fastp_single_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_single_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: @@ -58,7 +58,7 @@ md5sum: de315d397c994d8e66bafc7a8dc11070 - name: fastp test_fastp_paired_end_trim_fail - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_trim_fail -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp 
files: @@ -83,7 +83,7 @@ md5sum: f52309b35a7c15cbd56a9c3906ef98a5 - name: fastp test_fastp_paired_end_merged - command: nextflow run tests/modules/fastp -entry test_fastp_paired_end_merged -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastp -entry test_fastp_paired_end_merged -c ./tests/config/nextflow.config -c ./tests/modules/fastp/nextflow.config tags: - fastp files: diff --git a/tests/modules/fastqc/main.nf b/tests/modules/fastqc/main.nf index d95befec..f7db9b7c 100644 --- a/tests/modules/fastqc/main.nf +++ b/tests/modules/fastqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' // // Test with single-end data diff --git a/tests/modules/fastqc/nextflow.config b/tests/modules/fastqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fastqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fastqc/test.yml b/tests/modules/fastqc/test.yml index 794e63fe..fa830cbc 100644 --- a/tests/modules/fastqc/test.yml +++ b/tests/modules/fastqc/test.yml @@ -1,5 +1,5 @@ - name: fastqc single-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_single_end -c ./tests/config/nextflow.config -c ./tests/modules/fastqc/nextflow.config tags: - fastqc files: @@ -7,7 +7,7 @@ - path: ./output/fastqc/test_fastqc.zip - name: fastqc paired-end - command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fastqc/ -entry test_fastqc_paired_end -c ./tests/config/nextflow.config -c 
./tests/modules/fastqc/nextflow.config tags: - fastqc files: diff --git a/tests/modules/fastqscan/main.nf b/tests/modules/fastqscan/main.nf new file mode 100644 index 00000000..b9a321fe --- /dev/null +++ b/tests/modules/fastqscan/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { FASTQSCAN } from '../../../modules/fastqscan/main.nf' + +workflow test_fastqscan { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + FASTQSCAN ( input ) +} diff --git a/tests/modules/fastqscan/nextflow.config b/tests/modules/fastqscan/nextflow.config new file mode 100644 index 00000000..f688ecb6 --- /dev/null +++ b/tests/modules/fastqscan/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FASTQSCAN { + ext.args = '-g 30000' + } + +} diff --git a/tests/modules/fastqscan/test.yml b/tests/modules/fastqscan/test.yml new file mode 100644 index 00000000..d538804c --- /dev/null +++ b/tests/modules/fastqscan/test.yml @@ -0,0 +1,7 @@ +- name: fastqscan test_fastqscan + command: nextflow run ./tests/modules/fastqscan -entry test_fastqscan -c ./tests/config/nextflow.config -c ./tests/modules/fastqscan/nextflow.config + tags: + - fastqscan + files: + - path: output/fastqscan/test.json + md5sum: b9d59a36fe85e556b5a80573ea0b0266 diff --git a/tests/modules/fasttree/main.nf b/tests/modules/fasttree/main.nf index 109aaa77..e33228a9 100644 --- a/tests/modules/fasttree/main.nf +++ b/tests/modules/fasttree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FASTTREE } from '../../../modules/fasttree/main.nf' addParams( options: [:] ) +include { FASTTREE } from '../../../modules/fasttree/main.nf' workflow test_fasttree { diff --git a/tests/modules/fasttree/nextflow.config b/tests/modules/fasttree/nextflow.config
new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fasttree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fasttree/test.yml b/tests/modules/fasttree/test.yml index b30590c7..7e344cff 100644 --- a/tests/modules/fasttree/test.yml +++ b/tests/modules/fasttree/test.yml @@ -1,5 +1,5 @@ - name: fasttree - command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fasttree -entry test_fasttree -c ./tests/config/nextflow.config -c ./tests/modules/fasttree/nextflow.config tags: - fasttree files: diff --git a/tests/modules/fgbio/callmolecularconsensusreads/main.nf b/tests/modules/fgbio/callmolecularconsensusreads/main.nf index 8ce34eca..e31fdf39 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/main.nf +++ b/tests/modules/fgbio/callmolecularconsensusreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [args: '-s TemplateCoordinate', suffix: '_out'] ) -include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' addParams( options: [args: '-M 1', suffix: '_molreads'] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' +include { FGBIO_CALLMOLECULARCONSENSUSREADS } from '../../../../modules/fgbio/callmolecularconsensusreads/main.nf' workflow test_fgbio_callmolecularconsensusreads { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config new file mode 100644 index 00000000..e6721ff6 --- /dev/null +++ b/tests/modules/fgbio/callmolecularconsensusreads/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FGBIO_SORTBAM { + ext.args = '-s TemplateCoordinate' + ext.prefix = { "${meta.id}_out" } + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1' + ext.prefix = { "${meta.id}_molreads" } + } + +} diff --git a/tests/modules/fgbio/callmolecularconsensusreads/test.yml b/tests/modules/fgbio/callmolecularconsensusreads/test.yml index ac53957c..5e26cd01 100644 --- a/tests/modules/fgbio/callmolecularconsensusreads/test.yml +++ b/tests/modules/fgbio/callmolecularconsensusreads/test.yml @@ -1,5 +1,5 @@ - name: fgbio callmolecularconsensusreads - command: nextflow run tests/modules/fgbio/callmolecularconsensusreads -entry test_fgbio_callmolecularconsensusreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/callmolecularconsensusreads -entry test_fgbio_callmolecularconsensusreads -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/callmolecularconsensusreads/nextflow.config tags: - fgbio - fgbio/callmolecularconsensusreads diff --git a/tests/modules/fgbio/fastqtobam/main.nf b/tests/modules/fgbio/fastqtobam/main.nf index ce2f7efc..f01a17fa 100644 --- a/tests/modules/fgbio/fastqtobam/main.nf +++ b/tests/modules/fgbio/fastqtobam/main.nf @@ -1,16 +1,19 @@ #!/usr/bin/env nextflow nextflow.enable.dsl = 2 -params.read_structure = "+T 12M11S+T" -include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' addParams( options: [:] ) +include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' workflow test_fgbio_fastqtobam { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + 
file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + read_structure = "+T 12M11S+T" - FGBIO_FASTQTOBAM ( input, "${params.read_structure}" ) + FGBIO_FASTQTOBAM ( input, read_structure ) } diff --git a/tests/modules/fgbio/fastqtobam/nextflow.config b/tests/modules/fgbio/fastqtobam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/fastqtobam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/fastqtobam/test.yml b/tests/modules/fgbio/fastqtobam/test.yml index 6f2554e9..ab73f425 100644 --- a/tests/modules/fgbio/fastqtobam/test.yml +++ b/tests/modules/fgbio/fastqtobam/test.yml @@ -1,10 +1,8 @@ - name: fgbio fastqtobam test_fgbio_fastqtobam - command: nextflow run tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/fastqtobam/nextflow.config tags: - fgbio/fastqtobam - fgbio files: - path: output/fgbio/test_umi_converted.bam md5sum: 9510735554e5eff29244077a72075fb6 - - path: output/fgbio/versions.yml - md5sum: 524815093b96759060d0d800fc6a3f25 diff --git a/tests/modules/fgbio/groupreadsbyumi/main.nf b/tests/modules/fgbio/groupreadsbyumi/main.nf index 31f55724..b9bb350a 100644 --- a/tests/modules/fgbio/groupreadsbyumi/main.nf +++ b/tests/modules/fgbio/groupreadsbyumi/main.nf @@ -2,14 +2,14 @@ nextflow.enable.dsl = 2 -include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' addParams( options: [:] ) +include { FGBIO_GROUPREADSBYUMI } from '../../../../modules/fgbio/groupreadsbyumi/main.nf' workflow test_fgbio_groupreadsbyumi { - 
input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_tagged_bam'], checkIfExists: true) + ] - strategy = "Adjacency" - - FGBIO_GROUPREADSBYUMI ( input, strategy ) + FGBIO_GROUPREADSBYUMI ( input, 'Adjacency' ) } diff --git a/tests/modules/fgbio/groupreadsbyumi/nextflow.config b/tests/modules/fgbio/groupreadsbyumi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/groupreadsbyumi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/groupreadsbyumi/test.yml b/tests/modules/fgbio/groupreadsbyumi/test.yml index ce70f129..c1cfd4f0 100644 --- a/tests/modules/fgbio/groupreadsbyumi/test.yml +++ b/tests/modules/fgbio/groupreadsbyumi/test.yml @@ -1,5 +1,5 @@ - name: fgbio groupreadsbyumi test_fgbio_groupreadsbyumi - command: nextflow run tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/groupreadsbyumi -entry test_fgbio_groupreadsbyumi -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/groupreadsbyumi/nextflow.config tags: - fgbio - fgbio/groupreadsbyumi diff --git a/tests/modules/fgbio/sortbam/main.nf b/tests/modules/fgbio/sortbam/main.nf index 65bea1d7..ada99d0f 100644 --- a/tests/modules/fgbio/sortbam/main.nf +++ b/tests/modules/fgbio/sortbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' addParams( options: [:] ) +include { FGBIO_SORTBAM } from '../../../../modules/fgbio/sortbam/main.nf' workflow test_fgbio_sortbam { input = [ [ id:'test' ], // meta map 
diff --git a/tests/modules/fgbio/sortbam/nextflow.config b/tests/modules/fgbio/sortbam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/fgbio/sortbam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/fgbio/sortbam/test.yml b/tests/modules/fgbio/sortbam/test.yml index 68183cd2..6789aed8 100644 --- a/tests/modules/fgbio/sortbam/test.yml +++ b/tests/modules/fgbio/sortbam/test.yml @@ -1,5 +1,5 @@ - name: fgbio sortbam - command: nextflow run tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/fgbio/sortbam -entry test_fgbio_sortbam -c ./tests/config/nextflow.config -c ./tests/modules/fgbio/sortbam/nextflow.config tags: - fgbio - fgbio/sortbam diff --git a/tests/modules/filtlong/main.nf b/tests/modules/filtlong/main.nf index cd037623..df7892aa 100644 --- a/tests/modules/filtlong/main.nf +++ b/tests/modules/filtlong/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FILTLONG } from '../../../modules/filtlong/main.nf' addParams( options: [:] ) +include { FILTLONG } from '../../../modules/filtlong/main.nf' workflow test_filtlong { diff --git a/tests/modules/filtlong/nextflow.config b/tests/modules/filtlong/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/filtlong/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/filtlong/test.yml b/tests/modules/filtlong/test.yml index 30779d45..dc5fa5a9 100644 --- a/tests/modules/filtlong/test.yml +++ b/tests/modules/filtlong/test.yml @@ -1,5 +1,5 @@ - name: filtlong test_filtlong - command: nextflow run tests/modules/filtlong -entry test_filtlong -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/filtlong -entry test_filtlong -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -7,7 +7,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_se - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_se -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: @@ -15,7 +15,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: filtlong test_filtlong_illumina_pe - command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_pe -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config tags: - filtlong files: diff --git a/tests/modules/flash/main.nf b/tests/modules/flash/main.nf index 2128650d..4afcb8fc 100644 --- a/tests/modules/flash/main.nf +++ b/tests/modules/flash/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FLASH } from '../../../modules/flash/main.nf' addParams( options: [args:'-m 20 -M 100'] ) +include { FLASH } from '../../../modules/flash/main.nf' workflow test_flash { input = [ diff --git a/tests/modules/flash/nextflow.config b/tests/modules/flash/nextflow.config new file mode 100644 index 00000000..2845f9d9 --- /dev/null +++ b/tests/modules/flash/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: FLASH { + ext.args = '-m 20 -M 100' + } + +} diff --git a/tests/modules/flash/test.yml b/tests/modules/flash/test.yml index 31cdaeff..e5ed49ca 100644 --- a/tests/modules/flash/test.yml +++ b/tests/modules/flash/test.yml @@ -1,5 +1,5 @@ - name: flash test_flash - command: nextflow run tests/modules/flash -entry 
test_flash -c tests/config/nextflow.config + command: nextflow run ./tests/modules/flash -entry test_flash -c ./tests/config/nextflow.config -c ./tests/modules/flash/nextflow.config tags: - flash files: diff --git a/tests/modules/freebayes/main.nf b/tests/modules/freebayes/main.nf index c6f5641f..f8ae0ecb 100644 --- a/tests/modules/freebayes/main.nf +++ b/tests/modules/freebayes/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { FREEBAYES } from '../../../modules/freebayes/main.nf' addParams( options: [:] ) +include { FREEBAYES } from '../../../modules/freebayes/main.nf' workflow test_freebayes { diff --git a/tests/modules/freebayes/nextflow.config b/tests/modules/freebayes/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/freebayes/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/freebayes/test.yml b/tests/modules/freebayes/test.yml index 22fd0e88..c9aa78da 100644 --- a/tests/modules/freebayes/test.yml +++ b/tests/modules/freebayes/test.yml @@ -1,33 +1,33 @@ - name: freebayes test_freebayes - command: nextflow run tests/modules/freebayes -entry test_freebayes -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_bed - command: nextflow run tests/modules/freebayes -entry test_freebayes_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_bed -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_cram - command: nextflow run tests/modules/freebayes -entry test_freebayes_cram -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_cram -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic - command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: - path: output/freebayes/test.vcf.gz - name: freebayes test_freebayes_somatic_cram_intervals - command: nextflow run tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/freebayes -entry test_freebayes_somatic_cram_intervals -c ./tests/config/nextflow.config -c ./tests/modules/freebayes/nextflow.config tags: - freebayes files: diff --git a/tests/modules/gatk4/applybqsr/main.nf b/tests/modules/gatk4/applybqsr/main.nf index 80b51015..da85b11b 100644 --- a/tests/modules/gatk4/applybqsr/main.nf +++ b/tests/modules/gatk4/applybqsr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' addParams( options: [:] ) +include { GATK4_APPLYBQSR } from '../../../../modules/gatk4/applybqsr/main.nf' workflow test_gatk4_applybqsr { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/applybqsr/nextflow.config b/tests/modules/gatk4/applybqsr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/applybqsr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index ed89c6ff..d0b07d94 100644 --- 
a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -1,26 +1,26 @@ - name: gatk4 applybqsr test_gatk4_applybqsr - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 files: - path: output/gatk4/test.bam - md5sum: 87a2eabae2b7b41574f966612b5addae + md5sum: af56f5dd81b95070079d54670507f530 - name: gatk4 applybqsr test_gatk4_applybqsr_intervals - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 files: - path: output/gatk4/test.bam - md5sum: 9c015d3c1dbd9eee793b7386f432b6aa + md5sum: 0cbfa4be143e988d56ce741b5077510e - name: gatk4 applybqsr test_gatk4_applybqsr_cram - command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config tags: - gatk4/applybqsr - gatk4 files: - path: output/gatk4/test.bam - md5sum: 02f84815fdbc99c21c8d42ebdcabbbf7 + md5sum: 720ef7453fc3c9def18bbe396062346c diff --git a/tests/modules/gatk4/baserecalibrator/main.nf b/tests/modules/gatk4/baserecalibrator/main.nf index a50c09e3..2675d04b 100644 --- a/tests/modules/gatk4/baserecalibrator/main.nf +++ b/tests/modules/gatk4/baserecalibrator/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' addParams( options: [:] ) 
+include { GATK4_BASERECALIBRATOR } from '../../../../modules/gatk4/baserecalibrator/main.nf' workflow test_gatk4_baserecalibrator { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/baserecalibrator/nextflow.config b/tests/modules/gatk4/baserecalibrator/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/baserecalibrator/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/baserecalibrator/test.yml b/tests/modules/gatk4/baserecalibrator/test.yml index a15c9ee3..163fac08 100644 --- a/tests/modules/gatk4/baserecalibrator/test.yml +++ b/tests/modules/gatk4/baserecalibrator/test.yml @@ -1,5 +1,5 @@ - name: gatk4 baserecalibrator test_gatk4_baserecalibrator - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -8,7 +8,7 @@ md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_cram - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -17,7 +17,7 @@ md5sum: 35d89a3811aa31711fc9815b6b80e6ec - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_intervals - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator @@ -26,7 +26,7 @@ md5sum: 9ecb5f00a2229291705addc09c0ec231 - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_multiple_sites - command: nextflow run tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config tags: - gatk4 - gatk4/baserecalibrator diff --git a/tests/modules/gatk4/bedtointervallist/main.nf b/tests/modules/gatk4/bedtointervallist/main.nf index 1ca4be58..2dd72904 100644 --- a/tests/modules/gatk4/bedtointervallist/main.nf +++ b/tests/modules/gatk4/bedtointervallist/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' addParams( options: [:] ) +include { GATK4_BEDTOINTERVALLIST } from '../../../../modules/gatk4/bedtointervallist/main.nf' workflow test_gatk4_bedtointervallist { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/gatk4/bedtointervallist/nextflow.config b/tests/modules/gatk4/bedtointervallist/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/bedtointervallist/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/bedtointervallist/test.yml b/tests/modules/gatk4/bedtointervallist/test.yml index 83c3a574..3482fa6c 100644 --- a/tests/modules/gatk4/bedtointervallist/test.yml +++ b/tests/modules/gatk4/bedtointervallist/test.yml @@ -1,5 +1,5 @@ - name: gatk4 bedtointervallist 
test_gatk4_bedtointervallist - command: nextflow run tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/bedtointervallist -entry test_gatk4_bedtointervallist -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/bedtointervallist/nextflow.config tags: - gatk4 - gatk4/bedtointervallist diff --git a/tests/modules/gatk4/calculatecontamination/main.nf b/tests/modules/gatk4/calculatecontamination/main.nf index f93f66fb..4b659ed3 100644 --- a/tests/modules/gatk4/calculatecontamination/main.nf +++ b/tests/modules/gatk4/calculatecontamination/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' addParams( options: [:] ) +include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' workflow test_gatk4_calculatecontamination_tumor_only { diff --git a/tests/modules/gatk4/calculatecontamination/nextflow.config b/tests/modules/gatk4/calculatecontamination/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/calculatecontamination/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/calculatecontamination/test.yml b/tests/modules/gatk4/calculatecontamination/test.yml index 8736bc32..0c489bff 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -1,28 +1,28 @@ - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_tumor_only - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_tumor_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry 
test_gatk4_calculatecontamination_tumor_only -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation - command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config tags: - gatk4/calculatecontamination - gatk4 files: - path: output/gatk4/test.contamination.table - md5sum: ff348a26dd09404239a7ed0da7d98874 + md5sum: 5fdcf1728cf98985ce31c038eb24e05c - path: output/gatk4/test.segmentation.table - md5sum: 478cb4f69ec001944b9cd0e7e4de01ef + md5sum: 91f28bfe4727a3256810927fc5eba92f diff --git a/tests/modules/gatk4/createsequencedictionary/main.nf b/tests/modules/gatk4/createsequencedictionary/main.nf index 443d77bc..b304b043 100644 --- a/tests/modules/gatk4/createsequencedictionary/main.nf +++ 
b/tests/modules/gatk4/createsequencedictionary/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' addParams( options: [:] ) +include { GATK4_CREATESEQUENCEDICTIONARY } from '../../../../modules/gatk4/createsequencedictionary/main.nf' workflow test_gatk4_createsequencedictionary { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsequencedictionary/nextflow.config b/tests/modules/gatk4/createsequencedictionary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/createsequencedictionary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/createsequencedictionary/test.yml b/tests/modules/gatk4/createsequencedictionary/test.yml index 7788d16a..134a9d74 100644 --- a/tests/modules/gatk4/createsequencedictionary/test.yml +++ b/tests/modules/gatk4/createsequencedictionary/test.yml @@ -1,5 +1,5 @@ - name: gatk4 createsequencedictionary test_gatk4_createsequencedictionary - command: nextflow run tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/createsequencedictionary -entry test_gatk4_createsequencedictionary -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsequencedictionary/nextflow.config tags: - gatk4 - gatk4/createsequencedictionary diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf index 6e5366f5..5e1d1904 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from 
'../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' addParams( options: [suffix:'.pon'] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' workflow test_gatk4_createsomaticpanelofnormals { db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config new file mode 100644 index 00000000..d73e78ad --- /dev/null +++ b/tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_CREATESOMATICPANELOFNORMALS { + ext.prefix = { "${meta.id}.pon" } + } + +} diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index d3e6c537..a0e2bf26 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -1,9 +1,9 @@ - name: gatk4 createsomaticpanelofnormals test_gatk4_createsomaticpanelofnormals - command: nextflow run tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/createsomaticpanelofnormals -entry test_gatk4_createsomaticpanelofnormals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/createsomaticpanelofnormals/nextflow.config tags: - gatk4 - gatk4/createsomaticpanelofnormals files: - path: output/gatk4/test.pon.vcf.gz - path: output/gatk4/test.pon.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: 
e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf index 72772318..398a6c79 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/tests/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' addParams( options: [:] ) +include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' workflow test_gatk4_estimatelibrarycomplexity { diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml index ca949c00..a33e4ec1 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml +++ b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml @@ -1,5 +1,5 @@ - name: gatk4 estimatelibrarycomplexity test_gatk4_estimatelibrarycomplexity - command: nextflow run tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/estimatelibrarycomplexity/nextflow.config tags: - gatk4/estimatelibrarycomplexity - gatk4 diff --git a/tests/modules/gatk4/fastqtosam/main.nf b/tests/modules/gatk4/fastqtosam/main.nf index 64694d9f..4f53c791 100644 --- 
a/tests/modules/gatk4/fastqtosam/main.nf +++ b/tests/modules/gatk4/fastqtosam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' addParams( options: [:] ) +include { GATK4_FASTQTOSAM } from '../../../../modules/gatk4/fastqtosam/main.nf' workflow test_gatk4_fastqtosam_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/gatk4/fastqtosam/nextflow.config b/tests/modules/gatk4/fastqtosam/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/fastqtosam/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index f6597b66..d5d23f94 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -1,17 +1,17 @@ - name: gatk4 fastqtosam test_gatk4_fastqtosam_single_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_single_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4/fastqtosam - gatk4 files: - path: output/gatk4/test.bam - md5sum: 4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end - command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/fastqtosam/nextflow.config tags: - gatk4 - gatk4/fastqtosam files: - path: output/gatk4/test.bam - md5sum: 
4967100b2e4912c0e4ce0976d946bafb + md5sum: 0a0d308b219837977b8df9daa26db7de diff --git a/tests/modules/gatk4/filtermutectcalls/main.nf b/tests/modules/gatk4/filtermutectcalls/main.nf index a425238b..fa0acff9 100644 --- a/tests/modules/gatk4/filtermutectcalls/main.nf +++ b/tests/modules/gatk4/filtermutectcalls/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' addParams( options: [suffix:'.filtered'] ) +include { GATK4_FILTERMUTECTCALLS } from '../../../../modules/gatk4/filtermutectcalls/main.nf' workflow test_gatk4_filtermutectcalls_base { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -18,15 +18,15 @@ workflow test_gatk4_filtermutectcalls_base { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_with_files { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -38,15 +38,15 @@ workflow test_gatk4_filtermutectcalls_with_files { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - 
fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } workflow test_gatk4_filtermutectcalls_use_val { - input = [ + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_test2_paired_mutect2_calls_vcf_gz_tbi'], checkIfExists: true), @@ -58,8 +58,8 @@ workflow test_gatk4_filtermutectcalls_use_val { ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_FILTERMUTECTCALLS ( input, fasta, fastaidx, dict ) + GATK4_FILTERMUTECTCALLS ( input, fasta, fai, dict ) } diff --git a/tests/modules/gatk4/filtermutectcalls/nextflow.config b/tests/modules/gatk4/filtermutectcalls/nextflow.config new file mode 100644 index 00000000..3d4148d2 --- /dev/null +++ b/tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_FILTERMUTECTCALLS { + ext.prefix = { "${meta.id}.filtered" } + } + +} diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index b17a306c..72504e66 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ 
b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -1,5 +1,5 @@ - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_base - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_base -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -8,10 +8,10 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -20,10 +20,10 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val - command: nextflow run tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config tags: - gatk4 - gatk4/filtermutectcalls @@ -32,4 +32,4 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv 
md5sum: 98e1b87a52999eb8f429ef4a7877eb3f - path: output/gatk4/test.filtered.vcf.gz.tbi - md5sum: d88d2b745c9226ddf284e3494db8b9d2 + md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 diff --git a/tests/modules/gatk4/gatherbqsrreports/main.nf b/tests/modules/gatk4/gatherbqsrreports/main.nf new file mode 100644 index 00000000..2693a06a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/main.nf @@ -0,0 +1,27 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_GATHERBQSRREPORTS } from '../../../../modules/gatk4/gatherbqsrreports/main.nf' + +workflow test_gatk4_gatherbqsrreports { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true) + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} + +workflow test_gatk4_gatherbqsrreports_multiple { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_baserecalibrator_table'], checkIfExists: true) + ] + ] + + GATK4_GATHERBQSRREPORTS ( input ) +} diff --git a/tests/modules/gatk4/gatherbqsrreports/nextflow.config b/tests/modules/gatk4/gatherbqsrreports/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/gatk4/gatherbqsrreports/test.yml b/tests/modules/gatk4/gatherbqsrreports/test.yml new file mode 100644 index 00000000..76c90120 --- /dev/null +++ b/tests/modules/gatk4/gatherbqsrreports/test.yml @@ -0,0 +1,21 @@ +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports -c 
tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: + - path: output/gatk4/test.table + md5sum: 9603b69fdc3b5090de2e0dd78bfcc4bf + - path: output/gatk4/versions.yml + md5sum: 50238fd0f3b6f4efb2b5335b6324f905 + +- name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports_multiple + command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports_multiple -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/gatherbqsrreports + files: + - path: output/gatk4/test.table + md5sum: 0c1257eececf95db8ca378272d0f21f9 + - path: output/gatk4/versions.yml + md5sum: c6ce163062dd3609848fc5bc10660427 diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index ef67b04a..417a08a4 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -2,12 +2,12 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { GATK4_GENOMICSDBIMPORT } from '../../../../modules/gatk4/genomicsdbimport/main.nf' workflow test_gatk4_genomicsdbimport_create_genomicsdb { - input = [ [ id:'test_genomicsdb'], // meta map + input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true) , file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) , file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) , @@ -26,7 +26,7 @@ workflow test_gatk4_genomicsdbimport_get_intervalslist { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map [] , [] , [] , @@ -45,7 +45,7 @@ workflow 
test_gatk4_genomicsdbimport_update_genomicsdb { UNTAR ( db ) - def input = Channel.of([ [ id:'test_genomicsdb'], // meta map + def input = Channel.of([ [ id:'test'], // meta map file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'] , checkIfExists: true) , file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz_tbi'] , checkIfExists: true) , [] , diff --git a/tests/modules/gatk4/genomicsdbimport/nextflow.config b/tests/modules/gatk4/genomicsdbimport/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/genomicsdbimport/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 68f5ae7a..94a1a35e 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -1,35 +1,35 @@ - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_create_genomicsdb - command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_create_genomicsdb -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config tags: - gatk4/genomicsdbimport - gatk4 files: - - path: output/gatk4/test_genomicsdb/__tiledb_workspace.tdb + - path: output/gatk4/test/__tiledb_workspace.tdb md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/gatk4/test_genomicsdb/callset.json + - path: output/gatk4/test/callset.json md5sum: a7d07d1c86449bbb1091ff29368da07a - - path: output/gatk4/test_genomicsdb/chr22$1$40001/.__consolidation_lock + - path: output/gatk4/test/chr22$1$40001/.__consolidation_lock md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: 
output/gatk4/test_genomicsdb/chr22$1$40001/__array_schema.tdb - - path: output/gatk4/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + - path: output/gatk4/test/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json md5sum: 2502f79658bc000578ebcfddfc1194c0 - - path: output/gatk4/test_genomicsdb/vcfheader.vcf - contains: + - path: output/gatk4/test/vcfheader.vcf + contains: - "FORMAT= 0"', 'suffix': '.filtered'] -include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' addParams( options: test_options ) +include { GATK4_VARIANTFILTRATION } from '../../../../modules/gatk4/variantfiltration/main.nf' -workflow test_gatk4_variantfiltration { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] - fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - fai = [ file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) ] - genome_dict = [ file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) ] +// Basic parameters with uncompressed VCF input +workflow test_gatk4_variantfiltration_vcf_input { + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true) + ] - GATK4_VARIANTFILTRATION ( input, fasta, fai, genome_dict ) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) } + +// Basic 
parameters with compressed VCF input +workflow test_gatk4_variantfiltration_gz_input { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fasta_index = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + fasta_dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_VARIANTFILTRATION ( input, fasta, fasta_index, fasta_dict ) +} + + diff --git a/tests/modules/gatk4/variantfiltration/nextflow.config b/tests/modules/gatk4/variantfiltration/nextflow.config new file mode 100644 index 00000000..4b930f28 --- /dev/null +++ b/tests/modules/gatk4/variantfiltration/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GATK4_VARIANTFILTRATION { + ext.args = "--filter-name \'test_filter\' --filter-expression \'MQ0 > 0\'" + ext.prefix = { "${meta.id}.filtered" } + } + +} diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index 1a2bf6d2..b5da0e5c 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -1,9 +1,19 @@ -- name: gatk4 variantfiltration test_gatk4_variantfiltration - command: nextflow run tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration -c tests/config/nextflow.config +- name: gatk4 variantfiltration test_gatk4_variantfiltration_vcf_input + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_vcf_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config tags: - 
gatk4/variantfiltration - gatk4 files: - - path: output/gatk4/test.filtered.vcf - contains: - - "AC=2;AN=2;DP=1;DP4=0,0,1,0;MQ=60;MQ0F=0;SGB=-0.379885" + - path: output/gatk4/test.filtered.vcf.gz + contains: ['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi + +- name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input + command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config + tags: + - gatk4/variantfiltration + - gatk4 + files: + - path: output/gatk4/test.filtered.vcf.gz + contains: ['BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365'] + - path: output/gatk4/test.filtered.vcf.gz.tbi diff --git a/tests/modules/genmap/index/main.nf b/tests/modules/genmap/index/main.nf index 358ebb35..06106640 100644 --- a/tests/modules/genmap/index/main.nf +++ b/tests/modules/genmap/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [publish_dir:'genmap'] ) +include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' workflow test_genmap_index { diff --git a/tests/modules/genmap/index/nextflow.config b/tests/modules/genmap/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/genmap/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/genmap/index/test.yml b/tests/modules/genmap/index/test.yml index c5078014..ce2098ce 100644 --- a/tests/modules/genmap/index/test.yml +++ b/tests/modules/genmap/index/test.yml @@ -1,5 +1,5 @@ - 
name: genmap index test_genmap_index - command: nextflow run tests/modules/genmap/index -entry test_genmap_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/index -entry test_genmap_index -c ./tests/config/nextflow.config -c ./tests/modules/genmap/index/nextflow.config tags: - genmap - genmap/index diff --git a/tests/modules/genmap/mappability/main.nf b/tests/modules/genmap/mappability/main.nf index 636ec0e4..eb6a34fa 100644 --- a/tests/modules/genmap/mappability/main.nf +++ b/tests/modules/genmap/mappability/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' addParams( options: [:] ) -include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' addParams( options: [args : '-K 50 -E 2 -w -t -bg'] ) +include { GENMAP_INDEX } from '../../../../modules/genmap/index/main.nf' +include { GENMAP_MAPPABILITY } from '../../../../modules/genmap/mappability/main.nf' workflow test_genmap_map { diff --git a/tests/modules/genmap/mappability/nextflow.config b/tests/modules/genmap/mappability/nextflow.config new file mode 100644 index 00000000..6936b9ea --- /dev/null +++ b/tests/modules/genmap/mappability/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GENMAP_MAPPABILITY { + ext.args = '-K 50 -E 2 -w -t -bg' + } + +} diff --git a/tests/modules/genmap/mappability/test.yml b/tests/modules/genmap/mappability/test.yml index 29a12de1..94c1d501 100644 --- a/tests/modules/genmap/mappability/test.yml +++ b/tests/modules/genmap/mappability/test.yml @@ -1,5 +1,5 @@ - name: genmap mappability test_genmap_map - command: nextflow run tests/modules/genmap/mappability -entry test_genmap_map -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genmap/mappability -entry test_genmap_map -c ./tests/config/nextflow.config -c 
./tests/modules/genmap/mappability/nextflow.config tags: - genmap - genmap/mappability diff --git a/tests/modules/genrich/main.nf b/tests/modules/genrich/main.nf index aa1a2d49..34db589e 100644 --- a/tests/modules/genrich/main.nf +++ b/tests/modules/genrich/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { GENRICH } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.1"] ) -include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-p 0.9"] ) -include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-r -p 0.1"] ) -include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' addParams( options: ["args": "-j -p 0.1"] ) +include { GENRICH } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_CTRL } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ALL } from '../../../modules/genrich/main.nf' +include { GENRICH as GENRICH_ATACSEQ } from '../../../modules/genrich/main.nf' workflow test_genrich { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/genrich/nextflow.config b/tests/modules/genrich/nextflow.config new file mode 100644 index 00000000..8f79d7be --- /dev/null +++ b/tests/modules/genrich/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GENRICH { + ext.args = '-p 0.1' + } + + withName: GENRICH_CTRL { + ext.args = '-p 0.9' + } + + withName: GENRICH_ALL { + ext.args = '-r -p 0.1' + } + + withName: GENRICH_ATACSEQ { + ext.args = '-j -p 0.1' + } + +} diff --git a/tests/modules/genrich/test.yml b/tests/modules/genrich/test.yml index 63bf2927..972335c4 100644 --- a/tests/modules/genrich/test.yml +++ b/tests/modules/genrich/test.yml @@ -1,5 +1,5 @@ - name: genrich test_genrich - command: nextflow run tests/modules/genrich -entry test_genrich -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -7,7 +7,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_ctrl - command: nextflow run tests/modules/genrich -entry test_genrich_ctrl -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -15,7 +15,7 @@ md5sum: 2fcc392360b317f5ebee88cdbc149e05 - name: genrich test_genrich_all_outputs - command: nextflow run tests/modules/genrich -entry test_genrich_all_outputs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_all_outputs -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -31,7 +31,7 @@ md5sum: b14feef34b6d2379a173a734ca963cde - name: genrich test_genrich_blacklist - command: nextflow run tests/modules/genrich -entry test_genrich_blacklist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_blacklist -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: @@ -39,7 +39,7 @@ md5sum: 6afabdd3f691c7c84c66ff8a23984681 - name: genrich test_genrich_atacseq - command: nextflow run tests/modules/genrich -entry test_genrich_atacseq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/genrich -entry test_genrich_atacseq -c ./tests/config/nextflow.config -c ./tests/modules/genrich/nextflow.config tags: - genrich files: diff --git a/tests/modules/gffread/main.nf b/tests/modules/gffread/main.nf index 87e95275..6ab7922b 100644 --- a/tests/modules/gffread/main.nf +++ b/tests/modules/gffread/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GFFREAD } from '../../../modules/gffread/main.nf' 
addParams( options: [suffix: '.out'] ) +include { GFFREAD } from '../../../modules/gffread/main.nf' workflow test_gffread { input = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) diff --git a/tests/modules/gffread/nextflow.config b/tests/modules/gffread/nextflow.config new file mode 100644 index 00000000..0714a6e8 --- /dev/null +++ b/tests/modules/gffread/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.prefix = { "${gff.baseName}.out" } + } + +} diff --git a/tests/modules/gffread/test.yml b/tests/modules/gffread/test.yml index 48096f1e..c5a16132 100644 --- a/tests/modules/gffread/test.yml +++ b/tests/modules/gffread/test.yml @@ -1,5 +1,5 @@ - name: gffread - command: nextflow run ./tests/modules/gffread/ -entry test_gffread -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gffread/ -entry test_gffread -c ./tests/config/nextflow.config -c ./tests/modules/gffread/nextflow.config tags: - gffread files: diff --git a/tests/modules/glnexus/main.nf b/tests/modules/glnexus/main.nf index 2a79b2fa..aeb7c7e2 100644 --- a/tests/modules/glnexus/main.nf +++ b/tests/modules/glnexus/main.nf @@ -2,12 +2,16 @@ nextflow.enable.dsl = 2 -include { GLNEXUS } from '../../../modules/glnexus/main.nf' addParams( options: [:] ) +include { GLNEXUS } from '../../../modules/glnexus/main.nf' workflow test_glnexus { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz'], checkIfExists: true) + ] + ] + GLNEXUS ( input ) 
} diff --git a/tests/modules/glnexus/nextflow.config b/tests/modules/glnexus/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/glnexus/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/glnexus/test.yml b/tests/modules/glnexus/test.yml index c7b255ee..bfca4529 100644 --- a/tests/modules/glnexus/test.yml +++ b/tests/modules/glnexus/test.yml @@ -1,7 +1,7 @@ - name: glnexus test_glnexus - command: nextflow run tests/modules/glnexus -entry test_glnexus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/glnexus -entry test_glnexus -c ./tests/config/nextflow.config -c ./tests/modules/glnexus/nextflow.config tags: - glnexus files: - path: output/glnexus/test.bcf - md5sum: 33ac8c9f3ff54e6a23177ba94a449173 + md5sum: 62b2cea9c1b92ac63645cb031eea46fc diff --git a/tests/modules/graphmap2/align/main.nf b/tests/modules/graphmap2/align/main.nf index 0cd885ab..96b95166 100644 --- a/tests/modules/graphmap2/align/main.nf +++ b/tests/modules/graphmap2/align/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' addParams( options: [:] ) -include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' +include { GRAPHMAP2_ALIGN } from '../../../../modules/graphmap2/align/main.nf' workflow test_graphmap2_align { diff --git a/tests/modules/graphmap2/align/nextflow.config b/tests/modules/graphmap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/graphmap2/align/test.yml b/tests/modules/graphmap2/align/test.yml index 7e90b8d4..90e52dd1 100644 --- a/tests/modules/graphmap2/align/test.yml +++ b/tests/modules/graphmap2/align/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 align - command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/align -entry test_graphmap2_align -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/align/nextflow.config tags: - graphmap2 - graphmap2/align diff --git a/tests/modules/graphmap2/index/main.nf b/tests/modules/graphmap2/index/main.nf index 66347f06..3c449c6b 100644 --- a/tests/modules/graphmap2/index/main.nf +++ b/tests/modules/graphmap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' addParams( options: [:] ) +include { GRAPHMAP2_INDEX } from '../../../../modules/graphmap2/index/main.nf' workflow test_graphmap2_index { diff --git a/tests/modules/graphmap2/index/nextflow.config b/tests/modules/graphmap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/graphmap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/graphmap2/index/test.yml b/tests/modules/graphmap2/index/test.yml index 15042e97..0bff487e 100644 --- a/tests/modules/graphmap2/index/test.yml +++ b/tests/modules/graphmap2/index/test.yml @@ -1,5 +1,5 @@ - name: graphmap2 index - command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/graphmap2/index -entry test_graphmap2_index -c ./tests/config/nextflow.config -c ./tests/modules/graphmap2/index/nextflow.config tags: - graphmap2 - graphmap2/index diff --git 
a/tests/modules/gstama/collapse/main.nf b/tests/modules/gstama/collapse/main.nf index 70b3c741..3eb97767 100644 --- a/tests/modules/gstama/collapse/main.nf +++ b/tests/modules/gstama/collapse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' addParams( options: [ args:"-x capped -b BAM", suffix:'_tc' ] ) +include { GSTAMA_COLLAPSE } from '../../../../modules/gstama/collapse/main.nf' workflow test_gstama_collapse { diff --git a/tests/modules/gstama/collapse/nextflow.config b/tests/modules/gstama/collapse/nextflow.config new file mode 100644 index 00000000..a68f33f2 --- /dev/null +++ b/tests/modules/gstama/collapse/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_COLLAPSE { + ext.args = '-x capped -b BAM' + ext.prefix = { "${meta.id}_tc" } + } + +} diff --git a/tests/modules/gstama/collapse/test.yml b/tests/modules/gstama/collapse/test.yml index 98de6bb3..50d3775e 100644 --- a/tests/modules/gstama/collapse/test.yml +++ b/tests/modules/gstama/collapse/test.yml @@ -1,10 +1,10 @@ - name: gstama collapse test_gstama_collapse - command: nextflow run tests/modules/gstama/collapse -entry test_gstama_collapse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/collapse -entry test_gstama_collapse -c ./tests/config/nextflow.config -c ./tests/modules/gstama/collapse/nextflow.config tags: - gstama - gstama/collapse files: - - path: output/gstama/test_tc.bed + - path: output/gstama/test_tc_collapsed.bed md5sum: e5105198ed970a33ae0ecaa7bff421d9 - path: output/gstama/test_tc_local_density_error.txt md5sum: b917ac1f14eccd590b6881a686f324d5 @@ -18,5 +18,7 @@ md5sum: 0ca1a32f33ef05242d897d913802554b - path: output/gstama/test_tc_trans_report.txt md5sum: 33a86c15ca2acce36b2a5962f4c1adc4 + - path: output/gstama/test_tc_varcov.txt + md5sum: 
587fd899ff658eb66b1770a35283bfcb - path: output/gstama/test_tc_variants.txt md5sum: 5b1165e9f33faba4f7207013fc27257e diff --git a/tests/modules/gstama/merge/main.nf b/tests/modules/gstama/merge/main.nf index f9a8e05f..4a9102a2 100644 --- a/tests/modules/gstama/merge/main.nf +++ b/tests/modules/gstama/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' addParams( options: [suffix:'_merged'] ) +include { GSTAMA_MERGE } from '../../../../modules/gstama/merge/main' workflow test_gstama_merge { diff --git a/tests/modules/gstama/merge/nextflow.config b/tests/modules/gstama/merge/nextflow.config new file mode 100644 index 00000000..e0d7c8ef --- /dev/null +++ b/tests/modules/gstama/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GSTAMA_MERGE { + ext.prefix = { "${meta.id}_merged" } + } + +} diff --git a/tests/modules/gstama/merge/test.yml b/tests/modules/gstama/merge/test.yml index b98e35b6..1db35d15 100644 --- a/tests/modules/gstama/merge/test.yml +++ b/tests/modules/gstama/merge/test.yml @@ -1,5 +1,5 @@ - name: gstama merge test_gstama_merge - command: nextflow run tests/modules/gstama/merge -entry test_gstama_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gstama/merge -entry test_gstama_merge -c ./tests/config/nextflow.config -c ./tests/modules/gstama/merge/nextflow.config tags: - gstama - gstama/merge diff --git a/tests/modules/gtdbtk/classifywf/main.nf b/tests/modules/gtdbtk/classifywf/main.nf index f52b0ccc..1517d7cc 100644 --- a/tests/modules/gtdbtk/classifywf/main.nf +++ b/tests/modules/gtdbtk/classifywf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' addParams( options: [:] ) +include { GTDBTK_CLASSIFYWF } from '../../../../modules/gtdbtk/classifywf/main.nf' 
process STUB_GTDBTK_DATABASE { output: diff --git a/tests/modules/gtdbtk/classifywf/nextflow.config b/tests/modules/gtdbtk/classifywf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gtdbtk/classifywf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gtdbtk/classifywf/test.yml b/tests/modules/gtdbtk/classifywf/test.yml index 6d0f055e..e24f1e17 100644 --- a/tests/modules/gtdbtk/classifywf/test.yml +++ b/tests/modules/gtdbtk/classifywf/test.yml @@ -1,5 +1,5 @@ - name: gtdbtk classifywf - command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c tests/config/nextflow.config -stub-run + command: nextflow run ./tests/modules/gtdbtk/classifywf -entry test_gtdbtk_classifywf -c ./tests/config/nextflow.config -stub-run -c ./tests/modules/gtdbtk/classifywf/nextflow.config tags: - gtdbtk - gtdbtk/classifywf diff --git a/tests/modules/gubbins/main.nf b/tests/modules/gubbins/main.nf index 87e164d0..342150b3 100644 --- a/tests/modules/gubbins/main.nf +++ b/tests/modules/gubbins/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUBBINS } from '../../../modules/gubbins/main.nf' addParams( options: [:] ) +include { GUBBINS } from '../../../modules/gubbins/main.nf' workflow test_gubbins { input = file(params.test_data['sarscov2']['genome']['all_sites_fas'], checkIfExists: true) diff --git a/tests/modules/gubbins/nextflow.config b/tests/modules/gubbins/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gubbins/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gubbins/test.yml b/tests/modules/gubbins/test.yml index 7bc0216b..6c85260d 100644 --- a/tests/modules/gubbins/test.yml +++ 
b/tests/modules/gubbins/test.yml @@ -1,5 +1,5 @@ - name: gubbins - command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gubbins -entry test_gubbins -c ./tests/config/nextflow.config -c ./tests/modules/gubbins/nextflow.config tags: - gubbins files: diff --git a/tests/modules/gunc/downloaddb/main.nf b/tests/modules/gunc/downloaddb/main.nf index c0321279..3e3126f5 100644 --- a/tests/modules/gunc/downloaddb/main.nf +++ b/tests/modules/gunc/downloaddb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_downloaddb { diff --git a/tests/modules/gunc/downloaddb/nextflow.config b/tests/modules/gunc/downloaddb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/downloaddb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/downloaddb/test.yml b/tests/modules/gunc/downloaddb/test.yml index d1aafae7..4e1c23f8 100644 --- a/tests/modules/gunc/downloaddb/test.yml +++ b/tests/modules/gunc/downloaddb/test.yml @@ -1,5 +1,5 @@ - name: gunc downloaddb - command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunc/downloaddb -entry test_gunc_downloaddb -c ./tests/config/nextflow.config -c ./tests/modules/gunc/downloaddb/nextflow.config tags: - gunc - gunc/downloaddb diff --git a/tests/modules/gunc/run/main.nf b/tests/modules/gunc/run/main.nf index a1a191dc..28ecd35f 100644 --- a/tests/modules/gunc/run/main.nf +++ b/tests/modules/gunc/run/main.nf @@ -2,16 +2,16 @@ nextflow.enable.dsl = 2 -include { GUNC_RUN } from 
'../../../../modules/gunc/run/main.nf' addParams( options: [:] ) -include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' addParams( options: [:] ) - +include { GUNC_RUN } from '../../../../modules/gunc/run/main.nf' +include { GUNC_DOWNLOADDB } from '../../../../modules/gunc/downloaddb/main.nf' workflow test_gunc_run { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] - - GUNC_DOWNLOADDB('progenomes') + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) + ] + GUNC_DOWNLOADDB ( 'progenomes' ) GUNC_RUN ( input, GUNC_DOWNLOADDB.out.db ) } diff --git a/tests/modules/gunc/run/nextflow.config b/tests/modules/gunc/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunc/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunc/run/test.yml b/tests/modules/gunc/run/test.yml index d527f37e..5bcef868 100644 --- a/tests/modules/gunc/run/test.yml +++ b/tests/modules/gunc/run/test.yml @@ -1,5 +1,5 @@ - name: gunc run - command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunc/run -entry test_gunc_run -c ./tests/config/nextflow.config -c ./tests/modules/gunc/run/nextflow.config tags: - gunc - gunc/run diff --git a/tests/modules/gunzip/main.nf b/tests/modules/gunzip/main.nf index 5a24e742..3d41a4a2 100644 --- a/tests/modules/gunzip/main.nf +++ b/tests/modules/gunzip/main.nf @@ -2,10 +2,12 @@ nextflow.enable.dsl = 2 -include { GUNZIP } from '../../../modules/gunzip/main.nf' addParams( options: [:] ) +include { GUNZIP } from '../../../modules/gunzip/main.nf' workflow test_gunzip { - input = 
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + input = [ [], + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] GUNZIP ( input ) } diff --git a/tests/modules/gunzip/nextflow.config b/tests/modules/gunzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gunzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gunzip/test.yml b/tests/modules/gunzip/test.yml index 70012b21..70e95d6b 100644 --- a/tests/modules/gunzip/test.yml +++ b/tests/modules/gunzip/test.yml @@ -1,5 +1,5 @@ - name: gunzip - command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/gunzip -entry test_gunzip -c ./tests/config/nextflow.config -c ./tests/modules/gunzip/nextflow.config tags: - gunzip files: diff --git a/tests/modules/hicap/main.nf b/tests/modules/hicap/main.nf index 77c309a5..82c515de 100644 --- a/tests/modules/hicap/main.nf +++ b/tests/modules/hicap/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { HICAP } from '../../../modules/hicap/main.nf' addParams( options: [:] ) +include { HICAP } from '../../../modules/hicap/main.nf' workflow test_hicap { - - input = [ [ id:'test', single_end:false ], // meta map - file("https://github.com/bactopia/bactopia-tests/raw/main/data/species-specific/haemophilus-influenzae/GCF_900478275.fna.gz", checkIfExists: true) ] - + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true) + ] database_dir = [] model_fp = [] diff --git a/tests/modules/hicap/nextflow.config b/tests/modules/hicap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hicap/nextflow.config @@ -0,0 
+1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hicap/test.yml b/tests/modules/hicap/test.yml index 8c8420fd..0cce28c7 100644 --- a/tests/modules/hicap/test.yml +++ b/tests/modules/hicap/test.yml @@ -1,10 +1,10 @@ - name: hicap test_hicap - command: nextflow run tests/modules/hicap -entry test_hicap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hicap -entry test_hicap -c ./tests/config/nextflow.config -c ./tests/modules/hicap/nextflow.config tags: - hicap files: - - path: output/hicap/GCF_900478275.gbk + - path: output/hicap/genome.gbk md5sum: 562d026956903354ac80721f501335d4 - - path: output/hicap/GCF_900478275.svg + - path: output/hicap/genome.svg md5sum: 4fb94871dd0fdd8b4496049668176631 - - path: output/hicap/GCF_900478275.tsv + - path: output/hicap/genome.tsv diff --git a/tests/modules/hifiasm/main.nf b/tests/modules/hifiasm/main.nf index 30614389..f0e2a0f4 100644 --- a/tests/modules/hifiasm/main.nf +++ b/tests/modules/hifiasm/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HIFIASM } from '../../../modules/hifiasm/main.nf' addParams( options: [args:'-f0'] ) +include { HIFIASM } from '../../../modules/hifiasm/main.nf' /* * Test with long reads only diff --git a/tests/modules/hifiasm/nextflow.config b/tests/modules/hifiasm/nextflow.config new file mode 100644 index 00000000..0994c901 --- /dev/null +++ b/tests/modules/hifiasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HIFIASM { + ext.args = '-f0' + } + +} diff --git a/tests/modules/hifiasm/test.yml b/tests/modules/hifiasm/test.yml index 47d9e38f..f7e3e6ae 100644 --- a/tests/modules/hifiasm/test.yml +++ b/tests/modules/hifiasm/test.yml @@ -1,5 +1,5 @@ - name: hifiasm test_hifiasm_hifi_only - command: nextflow run tests/modules/hifiasm -entry 
test_hifiasm_hifi_only -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_hifi_only -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: @@ -16,7 +16,7 @@ - path: output/hifiasm/test.asm.ovlp.source.bin - name: hifiasm test_hifiasm_with_parental_reads - command: nextflow run tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hifiasm -entry test_hifiasm_with_parental_reads -c ./tests/config/nextflow.config -c ./tests/modules/hifiasm/nextflow.config tags: - hifiasm files: diff --git a/tests/modules/hisat2/align/main.nf b/tests/modules/hisat2/align/main.nf index 7bbe3a4b..17b47c93 100644 --- a/tests/modules/hisat2/align/main.nf +++ b/tests/modules/hisat2/align/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] ) -include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' +include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' +include { HISAT2_ALIGN } from '../../../../modules/hisat2/align/main.nf' workflow test_hisat2_align_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ 
-19,10 +22,13 @@ workflow test_hisat2_align_single_end { } workflow test_hisat2_align_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/hisat2/align/nextflow.config b/tests/modules/hisat2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/align/test.yml b/tests/modules/hisat2/align/test.yml index 1c6c8ac2..54e263bc 100644 --- a/tests/modules/hisat2/align/test.yml +++ b/tests/modules/hisat2/align/test.yml @@ -1,5 +1,5 @@ - name: hisat2 align test_hisat2_align_single_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config tags: - hisat2 - hisat2/align @@ -9,25 +9,25 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: 
output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 - name: hisat2 align test_hisat2_align_paired_end - command: nextflow run tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/align -entry test_hisat2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/align/nextflow.config tags: - hisat2 - hisat2/align @@ -37,19 +37,19 @@ - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/hisat2/test.bam - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 
md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 diff --git a/tests/modules/hisat2/build_test/main.nf b/tests/modules/hisat2/build_test/main.nf index f40f47cc..a0c14dc8 100644 --- a/tests/modules/hisat2/build_test/main.nf +++ b/tests/modules/hisat2/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) -include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' +include { HISAT2_BUILD } from '../../../../modules/hisat2/build/main.nf' workflow test_hisat2_build { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/hisat2/build_test/nextflow.config b/tests/modules/hisat2/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/build_test/test.yml b/tests/modules/hisat2/build_test/test.yml index a8bb2390..da5a450c 100644 --- a/tests/modules/hisat2/build_test/test.yml +++ b/tests/modules/hisat2/build_test/test.yml @@ -1,24 +1,24 @@ - name: hisat2 build test_hisat2_build - command: nextflow run tests/modules/hisat2/build_test -entry test_hisat2_build -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/hisat2/build_test -entry test_hisat2_build -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/build/nextflow.config tags: - hisat2 - hisat2/build files: - path: output/hisat2/genome.splice_sites.txt md5sum: d41d8cd98f00b204e9800998ecf8427e - - path: output/index/hisat2/genome.5.ht2 + - path: output/hisat2/hisat2/genome.5.ht2 md5sum: 91198831aaba993acac1734138c5f173 - - path: output/index/hisat2/genome.7.ht2 + - path: output/hisat2/hisat2/genome.7.ht2 md5sum: 9013eccd91ad614d7893c739275a394f - - path: output/index/hisat2/genome.1.ht2 + - path: output/hisat2/hisat2/genome.1.ht2 md5sum: 057cfa8a22b97ee9cff4c8d342498803 - - path: output/index/hisat2/genome.2.ht2 + - path: output/hisat2/hisat2/genome.2.ht2 md5sum: 47b153cd1319abc88dda532462651fcf - - path: output/index/hisat2/genome.6.ht2 + - path: output/hisat2/hisat2/genome.6.ht2 md5sum: 265e1284ce85686516fae5d35540994a - - path: output/index/hisat2/genome.3.ht2 + - path: output/hisat2/hisat2/genome.3.ht2 md5sum: 4ed93abba181d8dfab2e303e33114777 - - path: output/index/hisat2/genome.8.ht2 + - path: output/hisat2/hisat2/genome.8.ht2 md5sum: 33cdeccccebe80329f1fdbee7f5874cb - - path: output/index/hisat2/genome.4.ht2 + - path: output/hisat2/hisat2/genome.4.ht2 md5sum: c25be5f8b0378abf7a58c8a880b87626 diff --git a/tests/modules/hisat2/extractsplicesites/main.nf b/tests/modules/hisat2/extractsplicesites/main.nf index 5c7e17b9..e947717e 100644 --- a/tests/modules/hisat2/extractsplicesites/main.nf +++ b/tests/modules/hisat2/extractsplicesites/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' addParams( options: [:] ) +include { HISAT2_EXTRACTSPLICESITES } from '../../../../modules/hisat2/extractsplicesites/main.nf' workflow test_hisat2_extractsplicesites { gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git 
a/tests/modules/hisat2/extractsplicesites/nextflow.config b/tests/modules/hisat2/extractsplicesites/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hisat2/extractsplicesites/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hisat2/extractsplicesites/test.yml b/tests/modules/hisat2/extractsplicesites/test.yml index a3e29346..a528199c 100644 --- a/tests/modules/hisat2/extractsplicesites/test.yml +++ b/tests/modules/hisat2/extractsplicesites/test.yml @@ -1,5 +1,5 @@ - name: hisat2 extractsplicesites test_hisat2_extractsplicesites - command: nextflow run tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hisat2/extractsplicesites -entry test_hisat2_extractsplicesites -c ./tests/config/nextflow.config -c ./tests/modules/hisat2/extractsplicesites/nextflow.config tags: - hisat2 - hisat2/extractsplicesites diff --git a/tests/modules/hmmcopy/gccounter/main.nf b/tests/modules/hmmcopy/gccounter/main.nf index 30846ca9..05728bf5 100644 --- a/tests/modules/hmmcopy/gccounter/main.nf +++ b/tests/modules/hmmcopy/gccounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_GCCOUNTER } from '../../../../modules/hmmcopy/gccounter/main.nf' workflow test_hmmcopy_gccounter { fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/hmmcopy/gccounter/nextflow.config b/tests/modules/hmmcopy/gccounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/gccounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/gccounter/test.yml b/tests/modules/hmmcopy/gccounter/test.yml index edcd6b92..1cd20273 100644 --- a/tests/modules/hmmcopy/gccounter/test.yml +++ b/tests/modules/hmmcopy/gccounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy gccounter test_hmmcopy_gccounter - command: nextflow run tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmcopy/gccounter -entry test_hmmcopy_gccounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/gccounter/nextflow.config tags: - hmmcopy - hmmcopy/gccounter diff --git a/tests/modules/hmmcopy/readcounter/main.nf b/tests/modules/hmmcopy/readcounter/main.nf index 9025f98e..21737aab 100644 --- a/tests/modules/hmmcopy/readcounter/main.nf +++ b/tests/modules/hmmcopy/readcounter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' addParams( options: [:] ) +include { HMMCOPY_READCOUNTER } from '../../../../modules/hmmcopy/readcounter/main.nf' workflow test_hmmcopy_readcounter { diff --git a/tests/modules/hmmcopy/readcounter/nextflow.config b/tests/modules/hmmcopy/readcounter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmcopy/readcounter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmcopy/readcounter/test.yml b/tests/modules/hmmcopy/readcounter/test.yml index 6c00ee08..a7e84f35 100644 --- a/tests/modules/hmmcopy/readcounter/test.yml +++ b/tests/modules/hmmcopy/readcounter/test.yml @@ -1,5 +1,5 @@ - name: hmmcopy readcounter test_hmmcopy_readcounter - command: nextflow run tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmcopy/readcounter -entry test_hmmcopy_readcounter -c ./tests/config/nextflow.config -c ./tests/modules/hmmcopy/readcounter/nextflow.config tags: - hmmcopy - hmmcopy/readcounter diff --git a/tests/modules/hmmer/hmmalign/main.nf b/tests/modules/hmmer/hmmalign/main.nf index 55194dc6..3bf6d452 100644 --- a/tests/modules/hmmer/hmmalign/main.nf +++ b/tests/modules/hmmer/hmmalign/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' addParams( options: [:] ) +include { HMMER_HMMALIGN } from '../../../../modules/hmmer/hmmalign/main.nf' workflow test_hmmer_hmmalign { diff --git a/tests/modules/hmmer/hmmalign/nextflow.config b/tests/modules/hmmer/hmmalign/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hmmer/hmmalign/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hmmer/hmmalign/test.yml b/tests/modules/hmmer/hmmalign/test.yml index 4afb34ca..2e5ccfaf 100644 --- a/tests/modules/hmmer/hmmalign/test.yml +++ b/tests/modules/hmmer/hmmalign/test.yml @@ -1,5 +1,5 @@ - name: hmmer hmmalign test_hmmer_hmmalign - command: nextflow run tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c tests/config/nextflow.config + command: nextflow run ./tests/modules/hmmer/hmmalign -entry test_hmmer_hmmalign -c ./tests/config/nextflow.config -c ./tests/modules/hmmer/hmmalign/nextflow.config tags: - hmmer - hmmer/hmmalign diff --git a/tests/modules/homer/annotatepeaks/main.nf b/tests/modules/homer/annotatepeaks/main.nf index b146c857..ab8f6f8f 100644 --- a/tests/modules/homer/annotatepeaks/main.nf +++ b/tests/modules/homer/annotatepeaks/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' 
addParams( options: [:] ) +include { HOMER_ANNOTATEPEAKS } from '../../../../modules/homer/annotatepeaks/main.nf' workflow test_homer_annotatepeaks { input = [ [ id:'test'], diff --git a/tests/modules/homer/annotatepeaks/nextflow.config b/tests/modules/homer/annotatepeaks/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/homer/annotatepeaks/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/homer/annotatepeaks/test.yml b/tests/modules/homer/annotatepeaks/test.yml index fed0f82e..52fd99a3 100644 --- a/tests/modules/homer/annotatepeaks/test.yml +++ b/tests/modules/homer/annotatepeaks/test.yml @@ -1,5 +1,5 @@ - name: homer annotatepeaks test_homer_annotatepeaks - command: nextflow run tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/annotatepeaks -entry test_homer_annotatepeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/annotatepeaks/nextflow.config tags: - homer - homer/annotatepeaks diff --git a/tests/modules/homer/findpeaks/main.nf b/tests/modules/homer/findpeaks/main.nf index 06d44bdf..0e7e8ed6 100644 --- a/tests/modules/homer/findpeaks/main.nf +++ b/tests/modules/homer/findpeaks/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' addParams( options: [args: '-style factor'] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' workflow test_homer_findpeaks { input = [[id:'test'], diff --git a/tests/modules/homer/findpeaks/nextflow.config 
b/tests/modules/homer/findpeaks/nextflow.config new file mode 100644 index 00000000..9a921a3c --- /dev/null +++ b/tests/modules/homer/findpeaks/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + + withName: HOMER_FINDPEAKS { + ext.args = '-style factor' + } + +} diff --git a/tests/modules/homer/findpeaks/test.yml b/tests/modules/homer/findpeaks/test.yml index b0b1a0df..75e94529 100644 --- a/tests/modules/homer/findpeaks/test.yml +++ b/tests/modules/homer/findpeaks/test.yml @@ -1,5 +1,5 @@ - name: homer findpeaks - command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/findpeaks -entry test_homer_findpeaks -c ./tests/config/nextflow.config -c ./tests/modules/homer/findpeaks/nextflow.config tags: - homer - homer/findpeaks diff --git a/tests/modules/homer/maketagdirectory/main.nf b/tests/modules/homer/maketagdirectory/main.nf index 897aac1f..766aff0d 100644 --- a/tests/modules/homer/maketagdirectory/main.nf +++ b/tests/modules/homer/maketagdirectory/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' workflow test_homer_maketagdirectory { input = [[id:'test'], diff --git a/tests/modules/homer/maketagdirectory/nextflow.config b/tests/modules/homer/maketagdirectory/nextflow.config new file mode 100644 index 00000000..81587d69 --- /dev/null +++ b/tests/modules/homer/maketagdirectory/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args 
= '-format bed' + } + +} diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml index 80112c0b..746c6ef6 100644 --- a/tests/modules/homer/maketagdirectory/test.yml +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -1,5 +1,5 @@ - name: homer maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory @@ -16,7 +16,7 @@ md5sum: e5aa2b9843ca9c04ace297280aed6af4 - name: homer meta maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory diff --git a/tests/modules/homer/makeucscfile/main.nf b/tests/modules/homer/makeucscfile/main.nf index 5ed75959..986c9c14 100644 --- a/tests/modules/homer/makeucscfile/main.nf +++ b/tests/modules/homer/makeucscfile/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' addParams( options: [args: '-format bed'] ) -include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' addParams( options: [:] ) +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main.nf' workflow test_homer_makeucscfile { input = [[id:'test'], diff --git a/tests/modules/homer/makeucscfile/nextflow.config 
b/tests/modules/homer/makeucscfile/nextflow.config new file mode 100644 index 00000000..81587d69 --- /dev/null +++ b/tests/modules/homer/makeucscfile/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/makeucscfile/test.yml b/tests/modules/homer/makeucscfile/test.yml index 4d337f41..cf3d1b4d 100644 --- a/tests/modules/homer/makeucscfile/test.yml +++ b/tests/modules/homer/makeucscfile/test.yml @@ -1,5 +1,5 @@ - name: homer makeucscfile - command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c tests/config/nextflow.config + command: nextflow run ./tests/modules/homer/makeucscfile -entry test_homer_makeucscfile -c ./tests/config/nextflow.config -c ./tests/modules/homer/makeucscfile/nextflow.config tags: - homer - homer/makeucscfile diff --git a/tests/modules/idr/main.nf b/tests/modules/idr/main.nf index aa141a57..ed3bf289 100644 --- a/tests/modules/idr/main.nf +++ b/tests/modules/idr/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IDR } from '../../../modules/idr/main.nf' addParams( options: [:] ) +include { IDR } from '../../../modules/idr/main.nf' workflow test_idr_narrowpeak { diff --git a/tests/modules/idr/nextflow.config b/tests/modules/idr/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/idr/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/idr/test.yml b/tests/modules/idr/test.yml index 35ee4bc9..9d5ef2a7 100644 --- a/tests/modules/idr/test.yml +++ b/tests/modules/idr/test.yml @@ -1,5 +1,5 @@ - name: idr test_idr_narrowpeak - command: nextflow run tests/modules/idr -entry test_idr_narrowpeak -c tests/config/nextflow.config + command: nextflow 
run ./tests/modules/idr -entry test_idr_narrowpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -11,7 +11,7 @@ md5sum: 6443507ac66b9d3b64bc56b78328083e - name: idr test_idr_broadpeak - command: nextflow run tests/modules/idr -entry test_idr_broadpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_broadpeak -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: @@ -23,7 +23,7 @@ md5sum: e6917133112b5cec135c182ffac19237 - name: idr test_idr_noprefix - command: nextflow run tests/modules/idr -entry test_idr_noprefix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/idr -entry test_idr_noprefix -c ./tests/config/nextflow.config -c ./tests/modules/idr/nextflow.config tags: - idr files: diff --git a/tests/modules/imputeme/vcftoprs/main.nf b/tests/modules/imputeme/vcftoprs/main.nf new file mode 100644 index 00000000..dccc06e0 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { IMPUTEME_VCFTOPRS } from '../../../../modules/imputeme/vcftoprs/main.nf' + +workflow test_imputeme_vcftoprs { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + IMPUTEME_VCFTOPRS ( input ) +} diff --git a/tests/modules/imputeme/vcftoprs/nextflow.config b/tests/modules/imputeme/vcftoprs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/imputeme/vcftoprs/test.yml b/tests/modules/imputeme/vcftoprs/test.yml new file mode 100644 index 00000000..e5152a03 --- /dev/null +++ b/tests/modules/imputeme/vcftoprs/test.yml @@ -0,0 +1,8 
@@ +- name: imputeme vcftoprs test_imputeme_vcftoprs + command: nextflow run ./tests/modules/imputeme/vcftoprs -entry test_imputeme_vcftoprs -c ./tests/config/nextflow.config -c ./tests/modules/imputeme/vcftoprs/nextflow.config + tags: + - imputeme + - imputeme/vcftoprs + files: + - path: output/imputeme/output.json + contains: [ 'type_2_diabetes_32541925":{"GRS":[24.01]' ] diff --git a/tests/modules/iqtree/main.nf b/tests/modules/iqtree/main.nf index 977d7c0a..2d73bd52 100644 --- a/tests/modules/iqtree/main.nf +++ b/tests/modules/iqtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IQTREE } from '../../../modules/iqtree/main.nf' addParams( options: [:] ) +include { IQTREE } from '../../../modules/iqtree/main.nf' workflow test_iqtree { diff --git a/tests/modules/iqtree/nextflow.config b/tests/modules/iqtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/iqtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/iqtree/test.yml b/tests/modules/iqtree/test.yml index e40656a2..06de90d9 100644 --- a/tests/modules/iqtree/test.yml +++ b/tests/modules/iqtree/test.yml @@ -1,5 +1,5 @@ - name: iqtree test workflow - command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/iqtree -entry test_iqtree -c ./tests/config/nextflow.config -c ./tests/modules/iqtree/nextflow.config tags: - iqtree files: diff --git a/tests/modules/ismapper/main.nf b/tests/modules/ismapper/main.nf index b28344dc..abb180f7 100644 --- a/tests/modules/ismapper/main.nf +++ b/tests/modules/ismapper/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISMAPPER } from '../../../modules/ismapper/main.nf' addParams( options: [:] ) +include { ISMAPPER } from '../../../modules/ismapper/main.nf' workflow test_ismapper { diff --git 
a/tests/modules/ismapper/nextflow.config b/tests/modules/ismapper/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ismapper/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ismapper/test.yml b/tests/modules/ismapper/test.yml index 0574b855..b4f64448 100644 --- a/tests/modules/ismapper/test.yml +++ b/tests/modules/ismapper/test.yml @@ -1,5 +1,5 @@ - name: ismapper test_ismapper - command: nextflow run tests/modules/ismapper -entry test_ismapper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ismapper -entry test_ismapper -c ./tests/config/nextflow.config -c ./tests/modules/ismapper/nextflow.config tags: - ismapper files: diff --git a/tests/modules/isoseq3/cluster/main.nf b/tests/modules/isoseq3/cluster/main.nf index 90a24c11..958b03a6 100644 --- a/tests/modules/isoseq3/cluster/main.nf +++ b/tests/modules/isoseq3/cluster/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' addParams( options: [args: '--singletons --use-qvs --verbose'] ) +include { ISOSEQ3_CLUSTER } from '../../../../modules/isoseq3/cluster/main.nf' workflow test_isoseq3_cluster { diff --git a/tests/modules/isoseq3/cluster/nextflow.config b/tests/modules/isoseq3/cluster/nextflow.config new file mode 100644 index 00000000..8bfeaebd --- /dev/null +++ b/tests/modules/isoseq3/cluster/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_CLUSTER { + ext.args = '--singletons --use-qvs --verbose' + } + +} diff --git a/tests/modules/isoseq3/cluster/test.yml b/tests/modules/isoseq3/cluster/test.yml index 58b20ae2..b1f12df7 100644 --- a/tests/modules/isoseq3/cluster/test.yml +++ b/tests/modules/isoseq3/cluster/test.yml @@ 
-1,5 +1,5 @@ - name: isoseq3 cluster test_isoseq3_cluster - command: nextflow run tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/cluster -entry test_isoseq3_cluster -c ./tests/config/nextflow.config -c ./tests/modules/isoseq3/cluster/nextflow.config tags: - isoseq3 - isoseq3/cluster diff --git a/tests/modules/isoseq3/refine/main.nf b/tests/modules/isoseq3/refine/main.nf index 13736604..45dd1560 100644 --- a/tests/modules/isoseq3/refine/main.nf +++ b/tests/modules/isoseq3/refine/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' addParams( options: [suffix:'.refine'] ) +include { ISOSEQ3_REFINE } from '../../../../modules/isoseq3/refine/main' workflow test_isoseq3_refine { diff --git a/tests/modules/isoseq3/refine/nextflow.config b/tests/modules/isoseq3/refine/nextflow.config new file mode 100644 index 00000000..6a4dea9f --- /dev/null +++ b/tests/modules/isoseq3/refine/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ISOSEQ3_REFINE { + ext.prefix = { "${meta.id}.refine" } + } + +} diff --git a/tests/modules/isoseq3/refine/test.yml b/tests/modules/isoseq3/refine/test.yml index 2e7782d3..f2c63fda 100644 --- a/tests/modules/isoseq3/refine/test.yml +++ b/tests/modules/isoseq3/refine/test.yml @@ -1,5 +1,5 @@ - name: isoseq3 refine test_isoseq3_refine - command: nextflow run tests/modules/isoseq3/refine -entry test_isoseq3_refine -c tests/config/nextflow.config + command: nextflow run ./tests/modules/isoseq3/refine -entry test_isoseq3_refine -c ./tests/config/nextflow.config -c ./tests/modules/isoseq3/refine/nextflow.config tags: - isoseq3 - isoseq3/refine diff --git a/tests/modules/ivar/consensus/main.nf b/tests/modules/ivar/consensus/main.nf index 5e0457b5..d0807984 100644 --- 
a/tests/modules/ivar/consensus/main.nf +++ b/tests/modules/ivar/consensus/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 params.save_mpileup = true -include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' addParams( [ options: [args2: '-aa -A -d 0 -Q 0'] ] ) +include { IVAR_CONSENSUS } from '../../../../modules/ivar/consensus/main.nf' workflow test_ivar_consensus { input = [ [ id:'test'], diff --git a/tests/modules/ivar/consensus/nextflow.config b/tests/modules/ivar/consensus/nextflow.config new file mode 100644 index 00000000..7407619a --- /dev/null +++ b/tests/modules/ivar/consensus/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: IVAR_CONSENSUS { + ext.args2 = '-aa -A -d 0 -Q 0' + } + +} diff --git a/tests/modules/ivar/consensus/test.yml b/tests/modules/ivar/consensus/test.yml index 071fdc98..caaa640f 100644 --- a/tests/modules/ivar/consensus/test.yml +++ b/tests/modules/ivar/consensus/test.yml @@ -1,5 +1,5 @@ - name: ivar consensus - command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/consensus -entry test_ivar_consensus -c ./tests/config/nextflow.config -c ./tests/modules/ivar/consensus/nextflow.config tags: - ivar - ivar/consensus diff --git a/tests/modules/ivar/trim/main.nf b/tests/modules/ivar/trim/main.nf index 05b390b0..15d0e739 100644 --- a/tests/modules/ivar/trim/main.nf +++ b/tests/modules/ivar/trim/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' addParams([:]) +include { IVAR_TRIM } from '../../../../modules/ivar/trim/main.nf' workflow test_ivar_trim { input = [ [ id:'test'], diff --git a/tests/modules/ivar/trim/nextflow.config b/tests/modules/ivar/trim/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/ivar/trim/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ivar/trim/test.yml b/tests/modules/ivar/trim/test.yml index f2f46676..0be18ba8 100644 --- a/tests/modules/ivar/trim/test.yml +++ b/tests/modules/ivar/trim/test.yml @@ -1,5 +1,5 @@ - name: ivar trim - command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/trim -entry test_ivar_trim -c ./tests/config/nextflow.config -c ./tests/modules/ivar/trim/nextflow.config tags: - ivar - ivar/trim diff --git a/tests/modules/ivar/variants/main.nf b/tests/modules/ivar/variants/main.nf index 5358e785..f603b5e5 100644 --- a/tests/modules/ivar/variants/main.nf +++ b/tests/modules/ivar/variants/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' addParams([:]) +include { IVAR_VARIANTS } from '../../../../modules/ivar/variants/main.nf' workflow test_ivar_variants_no_gff_no_mpileup { params.gff = false diff --git a/tests/modules/ivar/variants/nextflow.config b/tests/modules/ivar/variants/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ivar/variants/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ivar/variants/test.yml b/tests/modules/ivar/variants/test.yml index a8be12a8..00e6e2c0 100644 --- a/tests/modules/ivar/variants/test.yml +++ b/tests/modules/ivar/variants/test.yml @@ -1,5 +1,5 @@ - name: ivar variants no gff no mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_no_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ivar/variants -entry 
test_ivar_variants_no_gff_no_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -8,7 +8,7 @@ md5sum: 728f1430f2402861396d9953465ac706 - name: ivar variants no gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c tests/config/nextflow.config --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_no_gff_with_mpileup -c ./tests/config/nextflow.config --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants @@ -19,7 +19,7 @@ md5sum: 56c4cd5a4ecb7d6364878818f46ae256 - name: ivar variants with gff with mpileup - command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup + command: nextflow run ./tests/modules/ivar/variants -entry test_ivar_variants_with_gff_with_mpileup -c ./tests/config/nextflow.config --gff tests/data/gff/sarscov2/MN908947.3.gff3 --save_mpileup -c ./tests/modules/ivar/variants/nextflow.config tags: - ivar - ivar/variants diff --git a/tests/modules/jupyternotebook/main.nf b/tests/modules/jupyternotebook/main.nf index c1da7e11..1db9d812 100644 --- a/tests/modules/jupyternotebook/main.nf +++ b/tests/modules/jupyternotebook/main.nf @@ -2,15 +2,9 @@ nextflow.enable.dsl = 2 -include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from '../../../modules/jupyternotebook/main.nf' addParams( - options: [:] -) -include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' addParams( - options: [:] -) +include { JUPYTERNOTEBOOK } from '../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE } from 
'../../../modules/jupyternotebook/main.nf' +include { JUPYTERNOTEBOOK as JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB } from '../../../modules/jupyternotebook/main.nf' workflow test_jupyternotebook { diff --git a/tests/modules/jupyternotebook/nextflow.config b/tests/modules/jupyternotebook/nextflow.config new file mode 100644 index 00000000..6066b2b8 --- /dev/null +++ b/tests/modules/jupyternotebook/nextflow.config @@ -0,0 +1,19 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: JUPYTERNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: JUPYTERNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + + withName: JUPYTERNOTEBOOK_PARAMETRIZE_IPYNB { + ext = ['parametrize': true] + } + +} diff --git a/tests/modules/jupyternotebook/test.yml b/tests/modules/jupyternotebook/test.yml index dd4f1175..31fdfdbb 100644 --- a/tests/modules/jupyternotebook/test.yml +++ b/tests/modules/jupyternotebook/test.yml @@ -1,5 +1,5 @@ - name: jupyternotebook test_jupyternotebook - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -8,7 +8,7 @@ - "n_iter = 10" - name: jupyternotebook test_jupyternotebook_parametrize - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: @@ -19,7 +19,7 @@ - "n_iter = 12" - name: jupyternotebook 
test_jupyternotebook_parametrize_ipynb - command: nextflow run tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/jupyternotebook -entry test_jupyternotebook_parametrize_ipynb -c ./tests/config/nextflow.config -c ./tests/modules/jupyternotebook/nextflow.config tags: - jupyternotebook files: diff --git a/tests/modules/kallisto/index/main.nf b/tests/modules/kallisto/index/main.nf index 7c6078f8..8ecd6d52 100644 --- a/tests/modules/kallisto/index/main.nf +++ b/tests/modules/kallisto/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' addParams( options: [:] ) +include { KALLISTO_INDEX } from '../../../../modules/kallisto/index/main.nf' workflow test_kallisto_index { diff --git a/tests/modules/kallisto/index/nextflow.config b/tests/modules/kallisto/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallisto/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallisto/index/test.yml b/tests/modules/kallisto/index/test.yml index b9dd23ad..90a06325 100644 --- a/tests/modules/kallisto/index/test.yml +++ b/tests/modules/kallisto/index/test.yml @@ -1,5 +1,5 @@ - name: kallisto index test_kallisto_index - command: nextflow run tests/modules/kallisto/index -entry test_kallisto_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallisto/index -entry test_kallisto_index -c ./tests/config/nextflow.config -c ./tests/modules/kallisto/index/nextflow.config tags: - kallisto - kallisto/index diff --git a/tests/modules/kallistobustools/count/main.nf b/tests/modules/kallistobustools/count/main.nf index 9172ddfc..6e6be03d 100644 --- a/tests/modules/kallistobustools/count/main.nf +++ 
b/tests/modules/kallistobustools/count/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' addParams( options: [args:"--cellranger -m 1"] ) +include { KALLISTOBUSTOOLS_COUNT } from '../../../../modules/kallistobustools/count/main.nf' workflow test_kallistobustools_count { diff --git a/tests/modules/kallistobustools/count/nextflow.config b/tests/modules/kallistobustools/count/nextflow.config new file mode 100644 index 00000000..eb4e20bd --- /dev/null +++ b/tests/modules/kallistobustools/count/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KALLISTOBUSTOOLS_COUNT { + ext.args = '--cellranger -m 1' + } + +} diff --git a/tests/modules/kallistobustools/count/test.yml b/tests/modules/kallistobustools/count/test.yml index 766d5b57..664e9fa6 100644 --- a/tests/modules/kallistobustools/count/test.yml +++ b/tests/modules/kallistobustools/count/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools count test_kallistobustools_count - command: nextflow run tests/modules/kallistobustools/count -entry test_kallistobustools_count -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/count -entry test_kallistobustools_count -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/count/nextflow.config tags: - kallistobustools/count - kallistobustools diff --git a/tests/modules/kallistobustools/ref/main.nf b/tests/modules/kallistobustools/ref/main.nf index 31b36d0d..09ea68ea 100644 --- a/tests/modules/kallistobustools/ref/main.nf +++ b/tests/modules/kallistobustools/ref/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KALLISTOBUSTOOLS_REF } from '../../../../modules/kallistobustools/ref/main.nf' addParams( options: [:] ) +include { KALLISTOBUSTOOLS_REF } from '../../../../modules/kallistobustools/ref/main.nf' workflow 
test_kallistobustools_ref_standard { diff --git a/tests/modules/kallistobustools/ref/nextflow.config b/tests/modules/kallistobustools/ref/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kallistobustools/ref/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kallistobustools/ref/test.yml b/tests/modules/kallistobustools/ref/test.yml index 54954085..1e8fd6c4 100644 --- a/tests/modules/kallistobustools/ref/test.yml +++ b/tests/modules/kallistobustools/ref/test.yml @@ -1,5 +1,5 @@ - name: kallistobustools ref test_kallistobustools_ref_standard - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_standard -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -9,7 +9,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_lamanno - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_lamanno -c ./tests/config/nextflow.config -c ./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools @@ -22,7 +22,7 @@ - path: output/kallistobustools/t2g.txt - name: kallistobustools ref test_kallistobustools_ref_nucleus - command: nextflow run tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kallistobustools/ref -entry test_kallistobustools_ref_nucleus -c ./tests/config/nextflow.config -c 
./tests/modules/kallistobustools/ref/nextflow.config tags: - kallistobustools/ref - kallistobustools diff --git a/tests/modules/khmer/normalizebymedian/main.nf b/tests/modules/khmer/normalizebymedian/main.nf index 3a3b348c..c439c40f 100644 --- a/tests/modules/khmer/normalizebymedian/main.nf +++ b/tests/modules/khmer/normalizebymedian/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [:] ) -include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' addParams( options: [args: '-C 20 -k 32'] ) +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' +include { KHMER_NORMALIZEBYMEDIAN } from '../../../../modules/khmer/normalizebymedian/main.nf' +include { KHMER_NORMALIZEBYMEDIAN as KHMER_NORMALIZEBYMEDIAN_ARGS } from '../../../../modules/khmer/normalizebymedian/main.nf' workflow test_khmer_normalizebymedian_only_pe { diff --git a/tests/modules/khmer/normalizebymedian/nextflow.config b/tests/modules/khmer/normalizebymedian/nextflow.config new file mode 100644 index 00000000..279a972a --- /dev/null +++ b/tests/modules/khmer/normalizebymedian/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: KHMER_NORMALIZEBYMEDIAN_ARGS { + ext.args = '-C 20 -k 32' + } + +} diff --git a/tests/modules/khmer/normalizebymedian/test.yml b/tests/modules/khmer/normalizebymedian/test.yml index a914a8ef..0e61588f 100644 --- a/tests/modules/khmer/normalizebymedian/test.yml +++ b/tests/modules/khmer/normalizebymedian/test.yml @@ -1,6 +1,6 @@ # nf-core modules create-test-yml khmer/normalizebymedian - name: khmer normalizebymedian only pe reads - command: nextflow run 
./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -10,7 +10,7 @@ #md5sum: 75e05f2e80cf4bd0b534d4b73f7c059c - name: khmer normalizebymedian only se reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_only_se -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -18,7 +18,7 @@ - path: output/khmer/only_se.fastq.gz - name: khmer normalizebymedian mixed reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_mixed -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -26,7 +26,7 @@ - path: output/khmer/mixed.fastq.gz - name: khmer normalizebymedian multiple pe reads - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_multiple_pe -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian @@ -34,7 +34,7 @@ - path: output/khmer/multiple_pe.fastq.gz - name: khmer normalizebymedian args - command: nextflow run ./tests/modules/khmer/normalizebymedian -entry 
test_khmer_normalizebymedian_args -c tests/config/nextflow.config + command: nextflow run ./tests/modules/khmer/normalizebymedian -entry test_khmer_normalizebymedian_args -c ./tests/config/nextflow.config -c ./tests/modules/khmer/normalizebymedian/nextflow.config tags: - khmer - khmer/normalizebymedian diff --git a/tests/modules/kleborate/main.nf b/tests/modules/kleborate/main.nf index f846e642..bce31225 100644 --- a/tests/modules/kleborate/main.nf +++ b/tests/modules/kleborate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { KLEBORATE } from '../../../modules/kleborate/main.nf' addParams( options: [:] ) +include { KLEBORATE } from '../../../modules/kleborate/main.nf' workflow test_kleborate { diff --git a/tests/modules/kleborate/nextflow.config b/tests/modules/kleborate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kleborate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kleborate/test.yml b/tests/modules/kleborate/test.yml index 1bee4708..c7b25778 100644 --- a/tests/modules/kleborate/test.yml +++ b/tests/modules/kleborate/test.yml @@ -1,5 +1,5 @@ - name: kleborate - command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kleborate -entry test_kleborate -c ./tests/config/nextflow.config -c ./tests/modules/kleborate/nextflow.config tags: - kleborate files: diff --git a/tests/modules/kraken2/kraken2/main.nf b/tests/modules/kraken2/kraken2/main.nf index e5638ec5..12399e9e 100644 --- a/tests/modules/kraken2/kraken2/main.nf +++ b/tests/modules/kraken2/kraken2/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' addParams( options: [:] ) 
+include { UNTAR } from '../../../../modules/untar/main.nf' +include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' workflow test_kraken2_kraken2_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/kraken2/kraken2/nextflow.config b/tests/modules/kraken2/kraken2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/kraken2/kraken2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/kraken2/kraken2/test.yml b/tests/modules/kraken2/kraken2/test.yml index 688fb34c..1ec413bf 100644 --- a/tests/modules/kraken2/kraken2/test.yml +++ b/tests/modules/kraken2/kraken2/test.yml @@ -1,5 +1,5 @@ - name: kraken2 kraken2 single-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 @@ -12,7 +12,7 @@ md5sum: 4227755fe40478b8d7dc8634b489761e - name: kraken2 kraken2 paired-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config tags: - kraken2 - kraken2/kraken2 diff --git a/tests/modules/krona/kronadb/main.nf b/tests/modules/krona/kronadb/main.nf new file mode 100644 index 00000000..ed955854 --- /dev/null +++ b/tests/modules/krona/kronadb/main.nf @@ -0,0 +1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KRONADB } from '../../../../modules/krona/kronadb/main.nf' + +workflow 
test_krona_kronadb { + KRONA_KRONADB ( ) +} diff --git a/tests/modules/krona/kronadb/nextflow.config b/tests/modules/krona/kronadb/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/kronadb/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/krona/kronadb/test.yml b/tests/modules/krona/kronadb/test.yml new file mode 100644 index 00000000..1d61640f --- /dev/null +++ b/tests/modules/krona/kronadb/test.yml @@ -0,0 +1,7 @@ +- name: krona kronadb test_krona_kronadb + command: nextflow run ./tests/modules/krona/kronadb -entry test_krona_kronadb -c ./tests/config/nextflow.config -c ./tests/modules/krona/kronadb/nextflow.config + tags: + - krona + - krona/kronadb + files: + - path: output/krona/taxonomy/taxonomy.tab diff --git a/tests/modules/krona/ktimporttaxonomy/main.nf b/tests/modules/krona/ktimporttaxonomy/main.nf new file mode 100644 index 00000000..a23e6fcb --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KTIMPORTTAXONOMY } from '../../../../modules/krona/ktimporttaxonomy/main.nf' + +workflow test_krona_ktimporttaxonomy { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + ] + taxonomy = file(params.test_data['generic']['txt']['hello'], checkIfExists: true) + + KRONA_KTIMPORTTAXONOMY ( input, taxonomy ) +} diff --git a/tests/modules/krona/ktimporttaxonomy/nextflow.config b/tests/modules/krona/ktimporttaxonomy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git 
a/tests/modules/krona/ktimporttaxonomy/test.yml b/tests/modules/krona/ktimporttaxonomy/test.yml new file mode 100644 index 00000000..b7748980 --- /dev/null +++ b/tests/modules/krona/ktimporttaxonomy/test.yml @@ -0,0 +1,9 @@ +- name: krona ktimporttaxonomy test_krona_ktimporttaxonomy + command: nextflow run ./tests/modules/krona/ktimporttaxonomy -entry test_krona_ktimporttaxonomy -c ./tests/config/nextflow.config -c ./tests/modules/krona/ktimporttaxonomy/nextflow.config + tags: + - krona/ktimporttaxonomy + - krona + files: + - path: output/krona/taxonomy.krona.html + contains: + - "DOCTYPE html PUBLIC" diff --git a/tests/modules/last/dotplot/main.nf b/tests/modules/last/dotplot/main.nf index b92ed270..3353821d 100644 --- a/tests/modules/last/dotplot/main.nf +++ b/tests/modules/last/dotplot/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' addParams( options: [:] ) +include { LAST_DOTPLOT } from '../../../../modules/last/dotplot/main.nf' workflow test_last_dotplot { diff --git a/tests/modules/last/dotplot/nextflow.config b/tests/modules/last/dotplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/dotplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/dotplot/test.yml b/tests/modules/last/dotplot/test.yml index 177e377b..c2a9910f 100644 --- a/tests/modules/last/dotplot/test.yml +++ b/tests/modules/last/dotplot/test.yml @@ -1,5 +1,5 @@ - name: last dotplot test_last_dotplot - command: nextflow run tests/modules/last/dotplot -entry test_last_dotplot -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/dotplot -entry test_last_dotplot -c ./tests/config/nextflow.config -c ./tests/modules/last/dotplot/nextflow.config tags: - last/dotplot - last diff --git a/tests/modules/last/lastal/main.nf 
b/tests/modules/last/lastal/main.nf index 262c8f5f..95c2f917 100644 --- a/tests/modules/last/lastal/main.nf +++ b/tests/modules/last/lastal/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_LASTAL } from '../../../../modules/last/lastal/main.nf' workflow test_last_lastal_with_dummy_param_file { diff --git a/tests/modules/last/lastal/nextflow.config b/tests/modules/last/lastal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/lastal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/lastal/test.yml b/tests/modules/last/lastal/test.yml index 48b0d223..f75e4ac5 100644 --- a/tests/modules/last/lastal/test.yml +++ b/tests/modules/last/lastal/test.yml @@ -1,5 +1,5 @@ - name: last lastal test_last_lastal_with_dummy_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_dummy_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal @@ -22,7 +22,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastal test_last_lastal_with_real_param_file - command: nextflow run tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastal -entry test_last_lastal_with_real_param_file -c ./tests/config/nextflow.config -c ./tests/modules/last/lastal/nextflow.config tags: - last - last/lastal diff --git 
a/tests/modules/last/lastdb/main.nf b/tests/modules/last/lastdb/main.nf index 2f11bee4..d1c7b79a 100644 --- a/tests/modules/last/lastdb/main.nf +++ b/tests/modules/last/lastdb/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' addParams( options: ['args': '-Q0'] ) +include { LAST_LASTDB } from '../../../../modules/last/lastdb/main.nf' workflow test_last_lastdb { diff --git a/tests/modules/last/lastdb/nextflow.config b/tests/modules/last/lastdb/nextflow.config new file mode 100644 index 00000000..9b8b9878 --- /dev/null +++ b/tests/modules/last/lastdb/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_LASTDB { + ext.args = '-Q0' + } + +} diff --git a/tests/modules/last/lastdb/test.yml b/tests/modules/last/lastdb/test.yml index c69ecfac..ece44cf3 100644 --- a/tests/modules/last/lastdb/test.yml +++ b/tests/modules/last/lastdb/test.yml @@ -1,5 +1,5 @@ - name: last lastdb test_last_lastdb - command: nextflow run tests/modules/last/lastdb -entry test_last_lastdb -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb -c ./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last @@ -20,7 +20,7 @@ md5sum: b7c40f06b1309dc6f37849eeb86dfd22 - name: last lastdb test_last_lastdb_gzipped_input - command: nextflow run tests/modules/last/lastdb -entry test_last_lastdb_gzipped_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/lastdb -entry test_last_lastdb_gzipped_input -c ./tests/config/nextflow.config -c ./tests/modules/last/lastdb/nextflow.config tags: - last/lastdb - last diff --git a/tests/modules/last/mafconvert/main.nf b/tests/modules/last/mafconvert/main.nf index 7864c68a..c87f6e6a 100644 --- a/tests/modules/last/mafconvert/main.nf +++ 
b/tests/modules/last/mafconvert/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' addParams( options: [:] ) +include { LAST_MAFCONVERT } from '../../../../modules/last/mafconvert/main.nf' workflow test_last_mafconvert { diff --git a/tests/modules/last/mafconvert/nextflow.config b/tests/modules/last/mafconvert/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafconvert/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafconvert/test.yml b/tests/modules/last/mafconvert/test.yml index 35c65ce9..86a80f20 100644 --- a/tests/modules/last/mafconvert/test.yml +++ b/tests/modules/last/mafconvert/test.yml @@ -1,5 +1,5 @@ - name: last mafconvert test_last_mafconvert - command: nextflow run tests/modules/last/mafconvert -entry test_last_mafconvert -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafconvert -entry test_last_mafconvert -c ./tests/config/nextflow.config -c ./tests/modules/last/mafconvert/nextflow.config tags: - last/mafconvert - last diff --git a/tests/modules/last/mafswap/main.nf b/tests/modules/last/mafswap/main.nf index 3bb72d63..5cc94932 100644 --- a/tests/modules/last/mafswap/main.nf +++ b/tests/modules/last/mafswap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' addParams( options: [:] ) +include { LAST_MAFSWAP } from '../../../../modules/last/mafswap/main.nf' workflow test_last_mafswap { diff --git a/tests/modules/last/mafswap/nextflow.config b/tests/modules/last/mafswap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/mafswap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/mafswap/test.yml b/tests/modules/last/mafswap/test.yml index c7e3778d..a0865e00 100644 --- a/tests/modules/last/mafswap/test.yml +++ b/tests/modules/last/mafswap/test.yml @@ -1,5 +1,5 @@ - name: last mafswap test_last_mafswap - command: nextflow run tests/modules/last/mafswap -entry test_last_mafswap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/mafswap -entry test_last_mafswap -c ./tests/config/nextflow.config -c ./tests/modules/last/mafswap/nextflow.config tags: - last - last/mafswap diff --git a/tests/modules/last/postmask/main.nf b/tests/modules/last/postmask/main.nf index c30ac806..9bbb10e9 100644 --- a/tests/modules/last/postmask/main.nf +++ b/tests/modules/last/postmask/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' addParams( options: [suffix:'.postmask'] ) +include { LAST_POSTMASK } from '../../../../modules/last/postmask/main.nf' workflow test_last_postmask { diff --git a/tests/modules/last/postmask/nextflow.config b/tests/modules/last/postmask/nextflow.config new file mode 100644 index 00000000..70c3f35b --- /dev/null +++ b/tests/modules/last/postmask/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_POSTMASK { + ext.prefix = { "${meta.id}.postmask" } + } + +} diff --git a/tests/modules/last/postmask/test.yml b/tests/modules/last/postmask/test.yml index 57aea822..81ae7f73 100644 --- a/tests/modules/last/postmask/test.yml +++ b/tests/modules/last/postmask/test.yml @@ -1,5 +1,5 @@ - name: last postmask test_last_postmask - command: nextflow run tests/modules/last/postmask -entry test_last_postmask -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/postmask -entry test_last_postmask -c 
./tests/config/nextflow.config -c ./tests/modules/last/postmask/nextflow.config tags: - last - last/postmask diff --git a/tests/modules/last/split/main.nf b/tests/modules/last/split/main.nf index 19d899ab..f4ece4f2 100644 --- a/tests/modules/last/split/main.nf +++ b/tests/modules/last/split/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' addParams( options: ['suffix':'.split'] ) +include { LAST_SPLIT } from '../../../../modules/last/split/main.nf' workflow test_last_split { diff --git a/tests/modules/last/split/nextflow.config b/tests/modules/last/split/nextflow.config new file mode 100644 index 00000000..6252ec14 --- /dev/null +++ b/tests/modules/last/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LAST_SPLIT { + ext.prefix = { "${meta.id}.split" } + } + +} diff --git a/tests/modules/last/split/test.yml b/tests/modules/last/split/test.yml index d57d7477..57eb345f 100644 --- a/tests/modules/last/split/test.yml +++ b/tests/modules/last/split/test.yml @@ -1,5 +1,5 @@ - name: last split test_last_split - command: nextflow run tests/modules/last/split -entry test_last_split -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/split -entry test_last_split -c ./tests/config/nextflow.config -c ./tests/modules/last/split/nextflow.config tags: - last - last/split diff --git a/tests/modules/last/train/main.nf b/tests/modules/last/train/main.nf index 26e318c3..0f280a82 100644 --- a/tests/modules/last/train/main.nf +++ b/tests/modules/last/train/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../../modules/untar/main.nf' addParams( options: [:] ) -include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../../modules/untar/main.nf' +include { LAST_TRAIN } from 
'../../../../modules/last/train/main.nf' workflow test_last_train { diff --git a/tests/modules/last/train/nextflow.config b/tests/modules/last/train/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/last/train/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/last/train/test.yml b/tests/modules/last/train/test.yml index 18eec951..8641600b 100644 --- a/tests/modules/last/train/test.yml +++ b/tests/modules/last/train/test.yml @@ -1,5 +1,5 @@ - name: last train test_last_train - command: nextflow run tests/modules/last/train -entry test_last_train -c tests/config/nextflow.config + command: nextflow run ./tests/modules/last/train -entry test_last_train -c ./tests/config/nextflow.config -c ./tests/modules/last/train/nextflow.config tags: - last/train - last diff --git a/tests/modules/leehom/main.nf b/tests/modules/leehom/main.nf new file mode 100644 index 00000000..1615d2e1 --- /dev/null +++ b/tests/modules/leehom/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LEEHOM } from '../../../modules/leehom/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' + +workflow test_leehom_bam { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + + fasta = [] + + SAMTOOLS_VIEW ( input, fasta ) + LEEHOM ( SAMTOOLS_VIEW.out.bam ) +} + +workflow test_leehom_se_fq { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + + LEEHOM ( input ) +} + +workflow test_leehom_pe_fq { + + input = [ [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] ] + + LEEHOM ( input ) +} diff --git a/tests/modules/leehom/nextflow.config b/tests/modules/leehom/nextflow.config new file mode 100644 index 00000000..25df48cd --- /dev/null +++ b/tests/modules/leehom/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.args = '-f4 -b' + } + +} diff --git a/tests/modules/leehom/test.yml b/tests/modules/leehom/test.yml new file mode 100644 index 00000000..98257492 --- /dev/null +++ b/tests/modules/leehom/test.yml @@ -0,0 +1,41 @@ +- name: leehom test_leehom_bam + command: nextflow run ./tests/modules/leehom -entry test_leehom_bam -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.bam + - path: output/samtools/test.bam + - path: output/leehom/test.log + md5sum: d1f5da273eb69f41babda510797c7671 + +- name: leehom test_leehom_se_fq + command: nextflow run ./tests/modules/leehom -entry test_leehom_se_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: ed10c4bbf5c3082ca68823535b91e1e2 + - path: output/leehom/test.log + md5sum: 59aa280cb72dfbea05ba913cb89db143 + +- name: leehom test_leehom_pe_fq + command: nextflow run ./tests/modules/leehom -entry test_leehom_pe_fq -c ./tests/config/nextflow.config -c ./tests/modules/leehom/nextflow.config + tags: + - leehom + files: + - path: output/leehom/test.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test.fq.gz + md5sum: 84929b78e3f89371ecd3b4c915b9ec33 + - path: output/leehom/test.log + md5sum: 800b5a88dc0822886bfbb271029e2a4a + - path: output/leehom/test_r1.fail.fq.gz + md5sum: 
7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r1.fq.gz + md5sum: e9258420fa712e8536106995a7d1d97a + - path: output/leehom/test_r2.fail.fq.gz + md5sum: 7029066c27ac6f5ef18d660d5741979a + - path: output/leehom/test_r2.fq.gz + md5sum: 27230bcc5eae81ec5c1701798d39c1af diff --git a/tests/modules/lima/main.nf b/tests/modules/lima/main.nf index df4b2be2..7501def9 100644 --- a/tests/modules/lima/main.nf +++ b/tests/modules/lima/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LIMA } from '../../../modules/lima/main.nf' addParams( options: [args: '--isoseq --peek-guess', suffix: ".fl"] ) +include { LIMA } from '../../../modules/lima/main.nf' workflow test_lima_bam { diff --git a/tests/modules/lima/nextflow.config b/tests/modules/lima/nextflow.config new file mode 100644 index 00000000..8da2613f --- /dev/null +++ b/tests/modules/lima/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LIMA { + ext.args = '--isoseq --peek-guess' + ext.prefix = { "${meta.id}.fl" } + } + +} diff --git a/tests/modules/lima/test.yml b/tests/modules/lima/test.yml index 1ff860d9..8d927624 100644 --- a/tests/modules/lima/test.yml +++ b/tests/modules/lima/test.yml @@ -1,5 +1,5 @@ - name: lima test_lima_bam - command: nextflow run tests/modules/lima -entry test_lima_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_bam -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -23,7 +23,7 @@ md5sum: bcbcaaaca418bdeb91141c81715ca420 - name: lima test_lima_fa - command: nextflow run tests/modules/lima -entry test_lima_fa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -39,7 +39,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima 
test_lima_fa_gz - command: nextflow run tests/modules/lima -entry test_lima_fa_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fa_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -55,7 +55,7 @@ md5sum: 03be2311ba4afb878d8e547ab38c11eb - name: lima test_lima_fq - command: nextflow run tests/modules/lima -entry test_lima_fq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: @@ -73,7 +73,7 @@ md5sum: e91d3c386aaf4effa63f33ee2eb7da2a - name: lima test_lima_fq_gz - command: nextflow run tests/modules/lima -entry test_lima_fq_gz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lima -entry test_lima_fq_gz -c ./tests/config/nextflow.config -c ./tests/modules/lima/nextflow.config tags: - lima files: diff --git a/tests/modules/lissero/main.nf b/tests/modules/lissero/main.nf new file mode 100644 index 00000000..339576c3 --- /dev/null +++ b/tests/modules/lissero/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { LISSERO } from '../../../modules/lissero/main.nf' + +workflow test_lissero { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/MDU-PHL/LisSero/raw/master/tests/test_seq/NC_002973.fna", checkIfExists: true) ] + + LISSERO ( input ) +} diff --git a/tests/modules/lissero/nextflow.config b/tests/modules/lissero/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lissero/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lissero/test.yml b/tests/modules/lissero/test.yml new file mode 100644 index 00000000..8dd7339e --- /dev/null +++ b/tests/modules/lissero/test.yml @@ -0,0 
+1,7 @@ +- name: lissero test_lissero + command: nextflow run ./tests/modules/lissero -entry test_lissero -c ./tests/config/nextflow.config -c ./tests/modules/lissero/nextflow.config + tags: + - lissero + files: + - path: output/lissero/test.tsv + contains: ['ID', 'SEROTYPE', 'FULL'] diff --git a/tests/modules/lofreq/call/main.nf b/tests/modules/lofreq/call/main.nf index 2c306fd1..70da4ea5 100644 --- a/tests/modules/lofreq/call/main.nf +++ b/tests/modules/lofreq/call/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' addParams( options: [:] ) +include { LOFREQ_CALL } from '../../../../modules/lofreq/call/main.nf' workflow test_lofreq_call { diff --git a/tests/modules/lofreq/call/nextflow.config b/tests/modules/lofreq/call/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/call/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/call/test.yml b/tests/modules/lofreq/call/test.yml index 88700bfe..b9f42542 100644 --- a/tests/modules/lofreq/call/test.yml +++ b/tests/modules/lofreq/call/test.yml @@ -1,5 +1,5 @@ - name: lofreq call test_lofreq_call - command: nextflow run tests/modules/lofreq/call -entry test_lofreq_call -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/call -entry test_lofreq_call -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/call/nextflow.config tags: - lofreq - lofreq/call diff --git a/tests/modules/lofreq/callparallel/main.nf b/tests/modules/lofreq/callparallel/main.nf index 724bbff1..24ab2db3 100644 --- a/tests/modules/lofreq/callparallel/main.nf +++ b/tests/modules/lofreq/callparallel/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' addParams( options: [:] ) +include { 
LOFREQ_CALLPARALLEL } from '../../../../modules/lofreq/callparallel/main.nf' workflow test_lofreq_callparallel { diff --git a/tests/modules/lofreq/callparallel/nextflow.config b/tests/modules/lofreq/callparallel/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/callparallel/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/callparallel/test.yml b/tests/modules/lofreq/callparallel/test.yml index e09f68c3..db281012 100644 --- a/tests/modules/lofreq/callparallel/test.yml +++ b/tests/modules/lofreq/callparallel/test.yml @@ -1,5 +1,5 @@ - name: lofreq callparallel test_lofreq_callparallel - command: nextflow run tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/callparallel -entry test_lofreq_callparallel -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/callparallel/nextflow.config tags: - lofreq/callparallel - lofreq diff --git a/tests/modules/lofreq/filter/main.nf b/tests/modules/lofreq/filter/main.nf index c5dcea97..bd2a7f54 100644 --- a/tests/modules/lofreq/filter/main.nf +++ b/tests/modules/lofreq/filter/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' addParams( options: [:] ) +include { LOFREQ_FILTER } from '../../../../modules/lofreq/filter/main.nf' workflow test_lofreq_filter { diff --git a/tests/modules/lofreq/filter/nextflow.config b/tests/modules/lofreq/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/lofreq/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/lofreq/filter/test.yml 
b/tests/modules/lofreq/filter/test.yml index 4ee82654..d3ee3812 100644 --- a/tests/modules/lofreq/filter/test.yml +++ b/tests/modules/lofreq/filter/test.yml @@ -1,5 +1,5 @@ - name: lofreq filter test_lofreq_filter - command: nextflow run tests/modules/lofreq/filter -entry test_lofreq_filter -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/filter -entry test_lofreq_filter -c ./tests/config/nextflow.config -c ./tests/modules/lofreq/filter/nextflow.config tags: - lofreq - lofreq/filter diff --git a/tests/modules/lofreq/indelqual/main.nf b/tests/modules/lofreq/indelqual/main.nf index ba0493dd..71652ce1 100644 --- a/tests/modules/lofreq/indelqual/main.nf +++ b/tests/modules/lofreq/indelqual/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 -include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' addParams( options: [ 'args': '--dindel', 'suffix':'.indelqual'] ) +include { LOFREQ_INDELQUAL } from '../../../../modules/lofreq/indelqual/main.nf' workflow test_lofreq_indelqual { diff --git a/tests/modules/lofreq/indelqual/nextflow.config b/tests/modules/lofreq/indelqual/nextflow.config new file mode 100644 index 00000000..c50c1363 --- /dev/null +++ b/tests/modules/lofreq/indelqual/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: LOFREQ_INDELQUAL { + ext.args = '--dindel' + ext.prefix = { "${meta.id}.indelqual" } + } + +} diff --git a/tests/modules/lofreq/indelqual/test.yml b/tests/modules/lofreq/indelqual/test.yml index f3e73297..6fffb523 100644 --- a/tests/modules/lofreq/indelqual/test.yml +++ b/tests/modules/lofreq/indelqual/test.yml @@ -1,5 +1,5 @@ - name: lofreq indelqual - command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c tests/config/nextflow.config + command: nextflow run ./tests/modules/lofreq/indelqual -entry test_lofreq_indelqual -c ./tests/config/nextflow.config 
-c ./tests/modules/lofreq/indelqual/nextflow.config tags: - lofreq - lofreq/indelqual diff --git a/tests/modules/macrel/contigs/main.nf b/tests/modules/macrel/contigs/main.nf new file mode 100644 index 00000000..a613dcc4 --- /dev/null +++ b/tests/modules/macrel/contigs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MACREL_CONTIGS } from '../../../../modules/macrel/contigs/main.nf' + +workflow test_macrel_contigs { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true) + ] + + MACREL_CONTIGS ( input ) +} diff --git a/tests/modules/macrel/contigs/nextflow.config b/tests/modules/macrel/contigs/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/macrel/contigs/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/macrel/contigs/test.yml b/tests/modules/macrel/contigs/test.yml new file mode 100644 index 00000000..af272605 --- /dev/null +++ b/tests/modules/macrel/contigs/test.yml @@ -0,0 +1,16 @@ +- name: macrel contigs test_macrel_contigs + command: nextflow run ./tests/modules/macrel/contigs -entry test_macrel_contigs -c ./tests/config/nextflow.config -c ./tests/modules/macrel/contigs/nextflow.config + tags: + - macrel/contigs + - macrel + files: + - path: output/macrel/test/README.md + md5sum: fa3706dfc95d0538a52c4d0d824be5fb + - path: output/macrel/test/test.all_orfs.faa.gz + - path: output/macrel/test/test.prediction.gz + - path: output/macrel/test/test.smorfs.faa.gz + md5sum: 79704c6120c2f794518301af6f9b963d + - path: output/macrel/test/test_log.txt + md5sum: 6fdba143dce759597eb9f80e5d968729 + - path: output/macrel/versions.yml + md5sum: be8bf0d0647751c635c3736655f29f85 diff --git a/tests/modules/macs2/callpeak/main.nf 
b/tests/modules/macs2/callpeak/main.nf new file mode 100644 index 00000000..070469dd --- /dev/null +++ b/tests/modules/macs2/callpeak/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MACS2_CALLPEAK } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_CTRL } from '../../../../modules/macs2/callpeak/main.nf' +include { MACS2_CALLPEAK as MACS2_CALLPEAK_BED } from '../../../../modules/macs2/callpeak/main.nf' + +workflow test_macs2_callpeak_bed { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['pacbio']['genemodel1'], checkIfExists: true)], + []] + + MACS2_CALLPEAK_BED ( input, 4000 ) +} + +workflow test_macs2_callpeak { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ], + []] + + MACS2_CALLPEAK ( input, 40000 ) +} + +workflow test_macs2_callpeak_ctrl { + input = [ [ id:'test', single_end:false ], // meta map + [ file( params.test_data['homo_sapiens']['illumina']['test_paired_end_name_sorted_bam'], checkIfExists: true) ], + [ file( params.test_data['homo_sapiens']['illumina']['test2_paired_end_name_sorted_bam'], checkIfExists: true) ]] + + MACS2_CALLPEAK_CTRL ( input, 40000 ) +} diff --git a/tests/modules/macs2/callpeak/nextflow.config b/tests/modules/macs2/callpeak/nextflow.config new file mode 100644 index 00000000..e3bd3f5d --- /dev/null +++ b/tests/modules/macs2/callpeak/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MACS2_CALLPEAK { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_CTRL { + ext.args = '--qval 0.1' + } + + withName: MACS2_CALLPEAK_BED { + ext.args = '--format BED --qval 1 --nomodel --extsize 200' + } + +} diff --git a/tests/modules/macs2/callpeak/test.yml 
b/tests/modules/macs2/callpeak/test.yml new file mode 100644 index 00000000..43c99140 --- /dev/null +++ b/tests/modules/macs2/callpeak/test.yml @@ -0,0 +1,38 @@ +- name: macs2 callpeak test_macs2_callpeak_bed + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_bed -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/macs2/test_peaks.xls + md5sum: 762383e3a35e1f9ac3834fd6b2926092 + - path: output/macs2/test_summits.bed + md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: macs2 callpeak test_macs2_callpeak + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 2e4da1c1704595e12aaf99cc715ad70c + - path: output/macs2/test_peaks.xls + md5sum: 5d65cb3dbd5421ea3bb5b490a100e9a4 + - path: output/macs2/test_summits.bed + md5sum: 26f0f97b6c14dbca129e947a58067c82 + +- name: macs2 callpeak test_macs2_callpeak_ctrl + command: nextflow run ./tests/modules/macs2/callpeak -entry test_macs2_callpeak_ctrl -c ./tests/config/nextflow.config -c ./tests/modules/macs2/callpeak/nextflow.config + tags: + - macs2 + - macs2/callpeak + files: + - path: output/macs2/test_peaks.narrowPeak + md5sum: 653e1108cc57ca07d0f60fc0f4fb8ba3 + - path: output/macs2/test_peaks.xls + md5sum: bf86546faa7b581b5209c29b22046a0a + - path: output/macs2/test_summits.bed + md5sum: 4f3c7c53a1d730d90d1b3dd9d3197af4 diff --git a/tests/modules/malt/build_test/main.nf b/tests/modules/malt/build_test/main.nf index b2f3eaf6..2542da0c 100644 --- a/tests/modules/malt/build_test/main.nf +++ b/tests/modules/malt/build_test/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { UNZIP } from 
'../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' workflow test_malt_build { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/malt/build_test/nextflow.config b/tests/modules/malt/build_test/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/build_test/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/build_test/test.yml b/tests/modules/malt/build_test/test.yml index c3ed4b8f..c6694ad5 100644 --- a/tests/modules/malt/build_test/test.yml +++ b/tests/modules/malt/build_test/test.yml @@ -1,5 +1,5 @@ - name: malt build - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build -c ./tests/config/nextflow.config -c ./tests/modules/malt/build_test/nextflow.config tags: - malt - malt/build @@ -21,7 +21,7 @@ - path: output/malt/malt_index/taxonomy.tre md5sum: bde26a1fff5c63d3046d3863607a1e70 - name: malt build gff - command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/build_test -entry test_malt_build_gff -c ./tests/config/nextflow.config -c ./tests/modules/malt/build_test/nextflow.config tags: - malt - malt/build diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf index 6292ca61..292a3fcf 100644 --- a/tests/modules/malt/run/main.nf +++ b/tests/modules/malt/run/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNZIP } from
'../../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../../modules/malt/run/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../../modules/malt/run/main.nf' workflow test_malt_run { diff --git a/tests/modules/malt/run/nextflow.config b/tests/modules/malt/run/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/malt/run/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml index 0c245f2f..5b0742e4 100644 --- a/tests/modules/malt/run/test.yml +++ b/tests/modules/malt/run/test.yml @@ -1,5 +1,5 @@ - name: malt run - command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c tests/config/nextflow.config + command: nextflow run ./tests/modules/malt/run -entry test_malt_run -c ./tests/config/nextflow.config -c ./tests/modules/malt/run/nextflow.config tags: - malt - malt/run diff --git a/tests/modules/maltextract/main.nf b/tests/modules/maltextract/main.nf index d18923ca..8e0a2241 100644 --- a/tests/modules/maltextract/main.nf +++ b/tests/modules/maltextract/main.nf @@ -2,11 +2,11 @@ nextflow.enable.dsl = 2 -include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' addParams( options: [:] ) -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' addParams( options: [:] ) -include { MALT_RUN } from '../../../modules/malt/run/main.nf' addParams( options: [:] ) -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' addParams( 
options: [:] ) +include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' +include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' workflow test_maltextract { diff --git a/tests/modules/maltextract/nextflow.config b/tests/modules/maltextract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maltextract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maltextract/test.yml b/tests/modules/maltextract/test.yml index 87bf0182..2440c100 100644 --- a/tests/modules/maltextract/test.yml +++ b/tests/modules/maltextract/test.yml @@ -1,5 +1,5 @@ - name: maltextract - command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/maltextract -entry test_maltextract -c ./tests/config/nextflow.config -c ./tests/modules/maltextract/nextflow.config tags: - maltextract files: diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index df996464..f8adedb0 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' addParams( options: [:] ) +include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' workflow test_manta_germline { input = [ diff --git a/tests/modules/manta/germline/nextflow.config b/tests/modules/manta/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/germline/test.yml b/tests/modules/manta/germline/test.yml index b4086d76..c6ead9eb 100644 --- a/tests/modules/manta/germline/test.yml +++ b/tests/modules/manta/germline/test.yml @@ -1,5 +1,5 @@ - name: manta germline - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline @@ -11,7 +11,7 @@ - path: output/manta/test.diploid_sv.vcf.gz - path: output/manta/test.diploid_sv.vcf.gz.tbi - name: manta germline target bed - command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/germline/nextflow.config tags: - manta - manta/germline diff --git a/tests/modules/manta/somatic/main.nf b/tests/modules/manta/somatic/main.nf index 553735c9..7da41bea 100644 --- a/tests/modules/manta/somatic/main.nf +++ b/tests/modules/manta/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' addParams( options: [:] ) +include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' workflow test_manta_somatic { diff --git a/tests/modules/manta/somatic/nextflow.config b/tests/modules/manta/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/somatic/test.yml 
b/tests/modules/manta/somatic/test.yml index 72f0953d..d701a210 100644 --- a/tests/modules/manta/somatic/test.yml +++ b/tests/modules/manta/somatic/test.yml @@ -1,5 +1,5 @@ - name: manta somatic test_manta_somatic - command: nextflow run tests/modules/manta/somatic -entry test_manta_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/somatic -entry test_manta_somatic -c ./tests/config/nextflow.config -c ./tests/modules/manta/somatic/nextflow.config tags: - manta/somatic - manta diff --git a/tests/modules/manta/tumoronly/main.nf b/tests/modules/manta/tumoronly/main.nf index 436ab781..be0d3dbb 100644 --- a/tests/modules/manta/tumoronly/main.nf +++ b/tests/modules/manta/tumoronly/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' addParams( options: [:] ) +include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' workflow test_manta_tumoronly { input = [ diff --git a/tests/modules/manta/tumoronly/nextflow.config b/tests/modules/manta/tumoronly/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/manta/tumoronly/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/manta/tumoronly/test.yml b/tests/modules/manta/tumoronly/test.yml index 13f2cde1..c56e23fa 100644 --- a/tests/modules/manta/tumoronly/test.yml +++ b/tests/modules/manta/tumoronly/test.yml @@ -1,5 +1,5 @@ - name: manta tumoronly - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly @@ -11,7 +11,7 @@ - path: output/manta/test.tumor_sv.vcf.gz - path: 
output/manta/test.tumor_sv.vcf.gz.tbi - name: manta tumoronly target bed - command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/manta/tumoronly/nextflow.config tags: - manta - manta/tumoronly diff --git a/tests/modules/mapdamage2/main.nf b/tests/modules/mapdamage2/main.nf new file mode 100644 index 00000000..b7e4d23b --- /dev/null +++ b/tests/modules/mapdamage2/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MAPDAMAGE2 } from '../../../modules/mapdamage2/main.nf' + +workflow test_mapdamage2 { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + MAPDAMAGE2 ( input, fasta ) +} diff --git a/tests/modules/mapdamage2/nextflow.config b/tests/modules/mapdamage2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mapdamage2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mapdamage2/test.yml b/tests/modules/mapdamage2/test.yml new file mode 100644 index 00000000..96c8b2da --- /dev/null +++ b/tests/modules/mapdamage2/test.yml @@ -0,0 +1,25 @@ +- name: mapdamage2 test_mapdamage2 + command: nextflow run ./tests/modules/mapdamage2 -entry test_mapdamage2 -c ./tests/config/nextflow.config -c ./tests/modules/mapdamage2/nextflow.config + tags: + - mapdamage2 + files: + - path: output/mapdamage2/results_test.paired_end.sorted/3pGtoA_freq.txt + md5sum: 3b300b8d2842441675cb2b56740801f0 + - path: 
output/mapdamage2/results_test.paired_end.sorted/5pCtoT_freq.txt + md5sum: 4c27465cd02e1fb8bf6fb2b01e98446d + - path: output/mapdamage2/results_test.paired_end.sorted/Fragmisincorporation_plot.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Runtime_log.txt + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_correct_prob.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_hist.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_iter_summ_stat.csv + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_post_pred.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/Stats_out_MCMC_trace.pdf + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp.txt + md5sum: 4244d9fa554bbfeebbcea8eba3ad6466 + - path: output/mapdamage2/results_test.paired_end.sorted/dnacomp_genome.csv + md5sum: ea91a3d205717d3c6b3e0b77bb840945 + - path: output/mapdamage2/results_test.paired_end.sorted/lgdistribution.txt + md5sum: f86dfc04b1fff4337cc91add6356e3a0 + - path: output/mapdamage2/results_test.paired_end.sorted/misincorporation.txt + md5sum: 1c89b4c96d1f8996c3d0879cad5129a5 diff --git a/tests/modules/mash/sketch/main.nf b/tests/modules/mash/sketch/main.nf index da72d1e3..cec2035b 100644 --- a/tests/modules/mash/sketch/main.nf +++ b/tests/modules/mash/sketch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' addParams( options: [:] ) +include { MASH_SKETCH } from '../../../../modules/mash/sketch/main.nf' workflow test_mash_sketch { diff --git a/tests/modules/mash/sketch/nextflow.config b/tests/modules/mash/sketch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mash/sketch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mash/sketch/test.yml b/tests/modules/mash/sketch/test.yml index 78f4598b..d5039956 100644 --- a/tests/modules/mash/sketch/test.yml +++ b/tests/modules/mash/sketch/test.yml @@ -1,5 +1,5 @@ - name: mash sketch - command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mash/sketch -entry test_mash_sketch -c ./tests/config/nextflow.config -c ./tests/modules/mash/sketch/nextflow.config tags: - mash/sketch files: diff --git a/tests/modules/mashtree/main.nf b/tests/modules/mashtree/main.nf index 47a7c12a..07f5e561 100644 --- a/tests/modules/mashtree/main.nf +++ b/tests/modules/mashtree/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MASHTREE } from '../../../modules/mashtree/main.nf' addParams( options: [:] ) +include { MASHTREE } from '../../../modules/mashtree/main.nf' workflow test_mashtree { diff --git a/tests/modules/mashtree/nextflow.config b/tests/modules/mashtree/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mashtree/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mashtree/test.yml b/tests/modules/mashtree/test.yml index 83ff6272..bea9638c 100644 --- a/tests/modules/mashtree/test.yml +++ b/tests/modules/mashtree/test.yml @@ -1,5 +1,5 @@ - name: mashtree test_mashtree - command: nextflow run tests/modules/mashtree -entry test_mashtree -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mashtree -entry test_mashtree -c ./tests/config/nextflow.config -c ./tests/modules/mashtree/nextflow.config tags: - mashtree files: diff --git a/tests/modules/maxbin2/main.nf b/tests/modules/maxbin2/main.nf index bede2c6a..3df417be 100644 --- 
a/tests/modules/maxbin2/main.nf +++ b/tests/modules/maxbin2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' addParams( options: [:] ) +include { MAXBIN2 } from '../../../modules/maxbin2/main.nf' workflow test_maxbin2 { diff --git a/tests/modules/maxbin2/nextflow.config b/tests/modules/maxbin2/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/maxbin2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/maxbin2/test.yml b/tests/modules/maxbin2/test.yml index 2721d17a..a8ba98f9 100644 --- a/tests/modules/maxbin2/test.yml +++ b/tests/modules/maxbin2/test.yml @@ -1,5 +1,5 @@ - name: maxbin2 - command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/maxbin2 -entry test_maxbin2 -c ./tests/config/nextflow.config -c ./tests/modules/maxbin2/nextflow.config tags: - maxbin2 files: diff --git a/tests/modules/medaka/main.nf b/tests/modules/medaka/main.nf new file mode 100644 index 00000000..75fc135b --- /dev/null +++ b/tests/modules/medaka/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MEDAKA } from '../../../modules/medaka/main.nf' + +workflow test_medaka { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + MEDAKA ( input ) +} diff --git a/tests/modules/medaka/nextflow.config b/tests/modules/medaka/nextflow.config new file mode 100644 index 00000000..c0b1b507 --- /dev/null +++ b/tests/modules/medaka/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MEDAKA { + ext.prefix = { "${meta.id}.polished.genome" } + } + +} diff --git a/tests/modules/medaka/test.yml b/tests/modules/medaka/test.yml new file mode 100644 index 00000000..54146bdc --- /dev/null +++ b/tests/modules/medaka/test.yml @@ -0,0 +1,7 @@ +- name: medaka test_medaka + command: nextflow run ./tests/modules/medaka -entry test_medaka -c ./tests/config/nextflow.config -c ./tests/modules/medaka/nextflow.config + tags: + - medaka + files: + - path: output/medaka/test.polished.genome.fa.gz + md5sum: f42303f1d6c2c79175faeb00e10b9a6e \ No newline at end of file diff --git a/tests/modules/megahit/main.nf b/tests/modules/megahit/main.nf index dcf07cd6..88acf3e3 100644 --- a/tests/modules/megahit/main.nf +++ b/tests/modules/megahit/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MEGAHIT } from '../../../modules/megahit/main.nf' addParams( options: [:] ) +include { MEGAHIT } from '../../../modules/megahit/main.nf' workflow test_megahit { diff --git a/tests/modules/megahit/nextflow.config b/tests/modules/megahit/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/megahit/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/megahit/test.yml b/tests/modules/megahit/test.yml index c390891b..2072ac12 100644 --- a/tests/modules/megahit/test.yml +++ b/tests/modules/megahit/test.yml @@ -1,5 +1,5 @@ - name: megahit - command: nextflow run ./tests/modules/megahit -entry test_megahit -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: @@ -31,7 +31,7 @@ md5sum: 7029066c27ac6f5ef18d660d5741979a - name: megahit_single - 
command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c tests/config/nextflow.config -process.cpus 1 + command: nextflow run ./tests/modules/megahit -entry test_megahit_single -c ./tests/config/nextflow.config -process.cpus 1 -c ./tests/modules/megahit/nextflow.config tags: - megahit files: diff --git a/tests/modules/meningotype/main.nf b/tests/modules/meningotype/main.nf new file mode 100644 index 00000000..a2d0ff10 --- /dev/null +++ b/tests/modules/meningotype/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MENINGOTYPE } from '../../../modules/meningotype/main.nf' + +workflow test_meningotype { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + MENINGOTYPE ( input ) +} diff --git a/tests/modules/meningotype/nextflow.config b/tests/modules/meningotype/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/meningotype/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/meningotype/test.yml b/tests/modules/meningotype/test.yml new file mode 100644 index 00000000..02ec8e1f --- /dev/null +++ b/tests/modules/meningotype/test.yml @@ -0,0 +1,7 @@ +- name: meningotype test_meningotype + command: nextflow run ./tests/modules/meningotype -entry test_meningotype -c ./tests/config/nextflow.config -c ./tests/modules/meningotype/nextflow.config + tags: + - meningotype + files: + - path: output/meningotype/test.tsv + md5sum: 25651bccb3d1c64cefcb7946fda30a6c diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf new file mode 100644 index 00000000..00309402 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + 
+nextflow.enable.dsl = 2 + +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' + +workflow test_metabat2_jgisummarizebamcontigdepths { + + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input ) +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml new file mode 100644 index 00000000..86c49d26 --- /dev/null +++ b/tests/modules/metabat2/jgisummarizebamcontigdepths/test.yml @@ -0,0 +1,8 @@ +- name: metabat2 jgisummarizebamcontigdepths test_metabat2_jgisummarizebamcontigdepths + command: nextflow run ./tests/modules/metabat2/jgisummarizebamcontigdepths -entry test_metabat2_jgisummarizebamcontigdepths -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/jgisummarizebamcontigdepths/nextflow.config + tags: + - metabat2/jgisummarizebamcontigdepths + - metabat2 + files: + - path: output/metabat2/test.txt.gz + md5sum: 8f735aa408d6c90e5a0310e06ace7a9a diff --git a/tests/modules/metabat2/metabat2/main.nf b/tests/modules/metabat2/metabat2/main.nf new file mode 100644 index 00000000..0179e4c3 --- /dev/null +++ b/tests/modules/metabat2/metabat2/main.nf @@ -0,0 +1,35 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { METABAT2_METABAT2 } from 
'../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' + +workflow test_metabat2_no_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it, []] } + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} + +workflow test_metabat2_depth { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) +} diff --git a/tests/modules/metabat2/metabat2/nextflow.config b/tests/modules/metabat2/metabat2/nextflow.config new file mode 100644 index 00000000..83754d8b --- /dev/null +++ b/tests/modules/metabat2/metabat2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: METABAT2_METABAT2 { + ext.args = '--minContig 1500 --minCV 0.1 --minCVSum 0.1 --minClsSize 10 --minS 2' + } + +} diff --git a/tests/modules/metabat2/metabat2/test.yml b/tests/modules/metabat2/metabat2/test.yml new file mode 100644 index 00000000..9389295e --- /dev/null +++ 
b/tests/modules/metabat2/metabat2/test.yml @@ -0,0 +1,24 @@ +- name: metabat2 metabat2 test_metabat2_no_depth + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_no_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + tags: + - metabat2 + - metabat2/metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + - path: output/metabat2/versions.yml + md5sum: 5742a71af36c3a748fd5726d76924ba8 + +- name: metabat2 metabat2 test_metabat2_depth + command: nextflow run ./tests/modules/metabat2/metabat2 -entry test_metabat2_depth -c ./tests/config/nextflow.config -c ./tests/modules/metabat2/metabat2/nextflow.config + tags: + - metabat2 + - metabat2/metabat2 + files: + - path: output/metabat2/bins/test.1.fa.gz + md5sum: 0e9bce5b5a0033fd4411a21dec881170 + - path: output/metabat2/test.tsv.gz + - path: output/metabat2/test.txt.gz + - path: output/metabat2/versions.yml + md5sum: 538c56b2df7d90580f05097218b5d5b1 diff --git a/tests/modules/metaphlan3/main.nf b/tests/modules/metaphlan3/main.nf index 2d855683..3354d2d9 100644 --- a/tests/modules/metaphlan3/main.nf +++ b/tests/modules/metaphlan3/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) -include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' addParams( options: ['suffix': '.sam'] ) -include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' addParams( options: [ 'args':'--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' ] ) +include { UNTAR } from '../../../modules/untar/main.nf' +include { SAMTOOLS_VIEW } from '../../../modules/samtools/view/main.nf' +include { METAPHLAN3 } from '../../../modules/metaphlan3/main.nf' workflow test_metaphlan3_single_end { @@ -42,7 +42,7 @@ workflow test_metaphlan3_sam { UNTAR ( db ) - SAMTOOLS_VIEW ( input ) + SAMTOOLS_VIEW ( input, [] ) METAPHLAN3 ( SAMTOOLS_VIEW.out.bam, UNTAR.out.untar ) } diff --git
a/tests/modules/metaphlan3/nextflow.config b/tests/modules/metaphlan3/nextflow.config new file mode 100644 index 00000000..a47b46e0 --- /dev/null +++ b/tests/modules/metaphlan3/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_VIEW { + ext.prefix = { "${meta.id}.sam" } + } + + withName: METAPHLAN3 { + ext.args = '--index mpa_v30_CHOCOPhlAn_201901 --add_viruses --bt2_ps very-sensitive-local' + } + +} diff --git a/tests/modules/metaphlan3/test.yml b/tests/modules/metaphlan3/test.yml index fbd5e70b..92e731d2 100644 --- a/tests/modules/metaphlan3/test.yml +++ b/tests/modules/metaphlan3/test.yml @@ -1,5 +1,5 @@ - name: metaphlan3 test_metaphlan3_single_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_single_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -30,7 +30,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_paired_end - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -61,7 +61,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_sam - command: nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_sam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_sam -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: @@ -92,7 +92,7 @@ md5sum: 1ca16b905abf657b88ca2bc12e7ad404 - name: metaphlan3 test_metaphlan3_fasta - command: 
nextflow run tests/modules/metaphlan3 -entry test_metaphlan3_fasta -c tests/config/nextflow.config + command: nextflow run ./tests/modules/metaphlan3 -entry test_metaphlan3_fasta -c ./tests/config/nextflow.config -c ./tests/modules/metaphlan3/nextflow.config tags: - metaphlan3 files: diff --git a/tests/modules/methyldackel/extract/main.nf b/tests/modules/methyldackel/extract/main.nf index 40e87b0b..92f92308 100644 --- a/tests/modules/methyldackel/extract/main.nf +++ b/tests/modules/methyldackel/extract/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_EXTRACT } from '../../../../modules/methyldackel/extract/main.nf' workflow test_methyldackel_extract { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/extract/nextflow.config b/tests/modules/methyldackel/extract/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/extract/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/extract/test.yml b/tests/modules/methyldackel/extract/test.yml index 70c371d7..28f969f3 100644 --- a/tests/modules/methyldackel/extract/test.yml +++ b/tests/modules/methyldackel/extract/test.yml @@ -1,5 +1,5 @@ - name: methyldackel extract - command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/extract -entry test_methyldackel_extract -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/extract/nextflow.config tags: - methyldackel - methyldackel/extract diff --git a/tests/modules/methyldackel/mbias/main.nf b/tests/modules/methyldackel/mbias/main.nf index 318dd663..f304e22f 100644 --- 
a/tests/modules/methyldackel/mbias/main.nf +++ b/tests/modules/methyldackel/mbias/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { METHYLDACKEL_MBIAS } from '../../../../modules/methyldackel/mbias/main.nf' addParams( options: [:] ) +include { METHYLDACKEL_MBIAS } from '../../../../modules/methyldackel/mbias/main.nf' workflow test_methyldackel_mbias { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/methyldackel/mbias/nextflow.config b/tests/modules/methyldackel/mbias/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/methyldackel/mbias/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/methyldackel/mbias/test.yml b/tests/modules/methyldackel/mbias/test.yml index 43074291..8bb23f24 100644 --- a/tests/modules/methyldackel/mbias/test.yml +++ b/tests/modules/methyldackel/mbias/test.yml @@ -1,5 +1,5 @@ - name: methyldackel mbias - command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c tests/config/nextflow.config + command: nextflow run ./tests/modules/methyldackel/mbias -entry test_methyldackel_mbias -c ./tests/config/nextflow.config -c ./tests/modules/methyldackel/mbias/nextflow.config tags: - methyldackel - methyldackel/mbias diff --git a/tests/modules/minia/main.nf b/tests/modules/minia/main.nf index e23f5cc4..5be4d17f 100644 --- a/tests/modules/minia/main.nf +++ b/tests/modules/minia/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIA } from '../../../modules/minia/main.nf' addParams( options: [:] ) +include { MINIA } from '../../../modules/minia/main.nf' workflow test_minia { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/minia/nextflow.config b/tests/modules/minia/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minia/nextflow.config @@ 
-0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minia/test.yml b/tests/modules/minia/test.yml index 6836f51d..78b84f37 100644 --- a/tests/modules/minia/test.yml +++ b/tests/modules/minia/test.yml @@ -1,5 +1,5 @@ - name: minia - command: nextflow run tests/modules/minia -entry test_minia -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minia -entry test_minia -c ./tests/config/nextflow.config -c ./tests/modules/minia/nextflow.config tags: - minia files: diff --git a/tests/modules/miniasm/main.nf b/tests/modules/miniasm/main.nf new file mode 100644 index 00000000..949660ac --- /dev/null +++ b/tests/modules/miniasm/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MINIASM } from '../../../modules/miniasm/main.nf' + +workflow test_miniasm { + + input = [ [ id:'test', single_end:true ], // meta map + file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['nanopore']['overlap_paf'], checkIfExists: true) + ] + + MINIASM ( input ) +} diff --git a/tests/modules/miniasm/nextflow.config b/tests/modules/miniasm/nextflow.config new file mode 100644 index 00000000..23f0a8d0 --- /dev/null +++ b/tests/modules/miniasm/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MINIASM { + ext.prefix = { "${meta.id}.assembly" } + } + +} diff --git a/tests/modules/miniasm/test.yml b/tests/modules/miniasm/test.yml new file mode 100644 index 00000000..0bdc350a --- /dev/null +++ b/tests/modules/miniasm/test.yml @@ -0,0 +1,9 @@ +- name: miniasm test_miniasm + command: nextflow run ./tests/modules/miniasm -entry test_miniasm -c ./tests/config/nextflow.config -c ./tests/modules/miniasm/nextflow.config + tags: + - miniasm + 
files: + - path: output/miniasm/test.assembly.gfa.gz + md5sum: c68e4c2b64338d1c0f5b79b32934da14 + - path: output/miniasm/test.assembly.fasta.gz + md5sum: d2f78ae618c02744e7a57bf4706ab8b4 diff --git a/tests/modules/minimap2/align/main.nf b/tests/modules/minimap2/align/main.nf index b4dbf5bd..e507d3e5 100644 --- a/tests/modules/minimap2/align/main.nf +++ b/tests/modules/minimap2/align/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' addParams( options: [:] ) +include { MINIMAP2_ALIGN } from '../../../../modules/minimap2/align/main.nf' workflow test_minimap2_align_single_end { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/minimap2/align/nextflow.config b/tests/modules/minimap2/align/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index 3309bf4b..598a5d25 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -1,5 +1,5 @@ - name: minimap2 align single-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align @@ -8,7 +8,7 @@ md5sum: 70e8cf299ee3ecd33e629d10c1f588ce - name: minimap2 align paired-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c 
./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config tags: - minimap2 - minimap2/align diff --git a/tests/modules/minimap2/index/main.nf b/tests/modules/minimap2/index/main.nf index 39aa93e0..a69efa85 100644 --- a/tests/modules/minimap2/index/main.nf +++ b/tests/modules/minimap2/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' addParams( options: [:] ) +include { MINIMAP2_INDEX } from '../../../../modules/minimap2/index/main.nf' workflow test_minimap2_index { diff --git a/tests/modules/minimap2/index/nextflow.config b/tests/modules/minimap2/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/minimap2/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/minimap2/index/test.yml b/tests/modules/minimap2/index/test.yml index 7a3cc8fa..95700452 100644 --- a/tests/modules/minimap2/index/test.yml +++ b/tests/modules/minimap2/index/test.yml @@ -1,5 +1,5 @@ - name: minimap2 index - command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/minimap2/index -entry test_minimap2_index -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/index/nextflow.config tags: - minimap2 - minimap2/index diff --git a/tests/modules/mlst/main.nf b/tests/modules/mlst/main.nf index 4b7d44be..f84ec622 100644 --- a/tests/modules/mlst/main.nf +++ b/tests/modules/mlst/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MLST } from '../../../modules/mlst/main.nf' addParams( options: [:] ) +include { MLST } from '../../../modules/mlst/main.nf' workflow test_mlst { diff --git a/tests/modules/mlst/nextflow.config b/tests/modules/mlst/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- 
/dev/null +++ b/tests/modules/mlst/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mlst/test.yml b/tests/modules/mlst/test.yml index 5a7c7a0e..53eacc5a 100644 --- a/tests/modules/mlst/test.yml +++ b/tests/modules/mlst/test.yml @@ -1,5 +1,5 @@ - name: mlst test_mlst - command: nextflow run tests/modules/mlst -entry test_mlst -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mlst -entry test_mlst -c ./tests/config/nextflow.config -c ./tests/modules/mlst/nextflow.config tags: - mlst files: diff --git a/tests/modules/mosdepth/main.nf b/tests/modules/mosdepth/main.nf index c4d8e9c4..8862204d 100644 --- a/tests/modules/mosdepth/main.nf +++ b/tests/modules/mosdepth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' addParams( options: [:] ) +include { MOSDEPTH } from '../../../modules/mosdepth/main.nf' workflow test_mosdepth { input = [ [ id:'test', single_end:true ], diff --git a/tests/modules/mosdepth/nextflow.config b/tests/modules/mosdepth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mosdepth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mosdepth/test.yml b/tests/modules/mosdepth/test.yml index f5ab5608..e264ef3b 100644 --- a/tests/modules/mosdepth/test.yml +++ b/tests/modules/mosdepth/test.yml @@ -1,5 +1,5 @@ - name: mosdepth - command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config tags: - mosdepth files: diff --git a/tests/modules/msisensor/msi/main.nf 
b/tests/modules/msisensor/msi/main.nf index f8ce4187..259ec887 100644 --- a/tests/modules/msisensor/msi/main.nf +++ b/tests/modules/msisensor/msi/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) -include { MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' +include { MSISENSOR_MSI } from '../../../../modules/msisensor/msi/main.nf' workflow test_msisensor_msi { diff --git a/tests/modules/msisensor/msi/nextflow.config b/tests/modules/msisensor/msi/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/msi/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/msi/test.yml b/tests/modules/msisensor/msi/test.yml index 1fc74ad3..0d0da1ee 100644 --- a/tests/modules/msisensor/msi/test.yml +++ b/tests/modules/msisensor/msi/test.yml @@ -1,5 +1,5 @@ - name: msisensor msi - command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c tests/config/nextflow.config + command: nextflow run ./tests/modules/msisensor/msi -entry test_msisensor_msi -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/msi/nextflow.config tags: - msisensor - msisensor/msi diff --git a/tests/modules/msisensor/scan/main.nf b/tests/modules/msisensor/scan/main.nf index 2303d0b9..de46dd9b 100644 --- a/tests/modules/msisensor/scan/main.nf +++ b/tests/modules/msisensor/scan/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' addParams( options: [:] ) +include { MSISENSOR_SCAN } from '../../../../modules/msisensor/scan/main.nf' workflow test_msisensor_scan { diff --git 
a/tests/modules/msisensor/scan/nextflow.config b/tests/modules/msisensor/scan/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/msisensor/scan/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/msisensor/scan/test.yml b/tests/modules/msisensor/scan/test.yml index 0d28c5a2..9e697a59 100644 --- a/tests/modules/msisensor/scan/test.yml +++ b/tests/modules/msisensor/scan/test.yml @@ -1,5 +1,5 @@ - name: msisensor scan - command: nextflow run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c tests/config/nextflow.config + command: nextflow run ./tests/modules/msisensor/scan -entry test_msisensor_scan -c ./tests/config/nextflow.config -c ./tests/modules/msisensor/scan/nextflow.config tags: - msisensor - msisensor/scan diff --git a/tests/modules/mtnucratio/main.nf b/tests/modules/mtnucratio/main.nf index dd9fc9db..6d6f5e1d 100644 --- a/tests/modules/mtnucratio/main.nf +++ b/tests/modules/mtnucratio/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' addParams( options: [:] ) +include { MTNUCRATIO } from '../../../modules/mtnucratio/main.nf' workflow test_mtnucratio { diff --git a/tests/modules/mtnucratio/nextflow.config b/tests/modules/mtnucratio/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mtnucratio/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mtnucratio/test.yml b/tests/modules/mtnucratio/test.yml index 76cbaf32..24dc3d16 100644 --- a/tests/modules/mtnucratio/test.yml +++ b/tests/modules/mtnucratio/test.yml @@ -1,5 +1,5 @@ - name: mtnucratio - command: nextflow run tests/modules/mtnucratio -entry test_mtnucratio -c tests/config/nextflow.config 
+ command: nextflow run ./tests/modules/mtnucratio -entry test_mtnucratio -c ./tests/config/nextflow.config -c ./tests/modules/mtnucratio/nextflow.config tags: - mtnucratio files: diff --git a/tests/modules/multiqc/main.nf b/tests/modules/multiqc/main.nf index ddabb43a..43643985 100644 --- a/tests/modules/multiqc/main.nf +++ b/tests/modules/multiqc/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { FASTQC } from '../../../modules/fastqc/main.nf' addParams( options: [:] ) -include { MULTIQC } from '../../../modules/multiqc/main.nf' addParams( options: [:] ) +include { FASTQC } from '../../../modules/fastqc/main.nf' +include { MULTIQC } from '../../../modules/multiqc/main.nf' workflow test_multiqc { input = [ [ id: 'test', single_end: false ], diff --git a/tests/modules/multiqc/nextflow.config b/tests/modules/multiqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/multiqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/multiqc/test.yml b/tests/modules/multiqc/test.yml index 69ded5d5..39796872 100644 --- a/tests/modules/multiqc/test.yml +++ b/tests/modules/multiqc/test.yml @@ -1,5 +1,5 @@ - name: multiqc - command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c ./tests/config/nextflow.config -c ./tests/modules/multiqc/nextflow.config tags: - multiqc files: diff --git a/tests/modules/mummer/main.nf b/tests/modules/mummer/main.nf new file mode 100644 index 00000000..30c8c4b8 --- /dev/null +++ b/tests/modules/mummer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MUMMER } from '../../../modules/mummer/main.nf' + +workflow test_mummer { + + input = [ [ id:'test', single_end:false ], // meta map + 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + MUMMER ( input ) +} diff --git a/tests/modules/mummer/nextflow.config b/tests/modules/mummer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/mummer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/mummer/test.yml b/tests/modules/mummer/test.yml new file mode 100644 index 00000000..359fd4ad --- /dev/null +++ b/tests/modules/mummer/test.yml @@ -0,0 +1,7 @@ +- name: mummer test_mummer + command: nextflow run ./tests/modules/mummer -entry test_mummer -c ./tests/config/nextflow.config -c ./tests/modules/mummer/nextflow.config + tags: + - mummer + files: + - path: output/mummer/test.coords + md5sum: 6084fe43c7cb2eca8b96d674560bdefc diff --git a/tests/modules/muscle/main.nf b/tests/modules/muscle/main.nf index 81a71761..a6294519 100644 --- a/tests/modules/muscle/main.nf +++ b/tests/modules/muscle/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { MUSCLE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-fasta -verbose -phys -phyi -maxiters 2']) -include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' addParams( options: ['args': '-maketree']) +include { MUSCLE } from '../../../modules/muscle/main.nf' +include { MUSCLE as MUSCLE_TREE } from '../../../modules/muscle/main.nf' workflow test_muscle { diff --git a/tests/modules/muscle/nextflow.config b/tests/modules/muscle/nextflow.config new file mode 100644 index 00000000..31331b0f --- /dev/null +++ b/tests/modules/muscle/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MUSCLE { + ext.args = '-fasta -verbose -phys 
-phyi -maxiters 2' + } + + withName: MUSCLE_TREE { + ext.args = '-maketree' + } + +} diff --git a/tests/modules/muscle/test.yml b/tests/modules/muscle/test.yml index 7f9d2a54..6995d71d 100644 --- a/tests/modules/muscle/test.yml +++ b/tests/modules/muscle/test.yml @@ -1,5 +1,5 @@ - name: muscle test_muscle - command: nextflow run tests/modules/muscle -entry test_muscle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/muscle -entry test_muscle -c ./tests/config/nextflow.config -c ./tests/modules/muscle/nextflow.config tags: - muscle files: diff --git a/tests/modules/nanolyse/main.nf b/tests/modules/nanolyse/main.nf index 97941a6d..91013cd0 100644 --- a/tests/modules/nanolyse/main.nf +++ b/tests/modules/nanolyse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOLYSE } from '../../../modules/nanolyse/main.nf' addParams( options: [suffix: '.clean'] ) +include { NANOLYSE } from '../../../modules/nanolyse/main.nf' workflow test_nanolyse { input = [ diff --git a/tests/modules/nanolyse/nextflow.config b/tests/modules/nanolyse/nextflow.config new file mode 100644 index 00000000..5f7b5bed --- /dev/null +++ b/tests/modules/nanolyse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NANOLYSE { + ext.prefix = { "${meta.id}.clean" } + } + +} diff --git a/tests/modules/nanolyse/test.yml b/tests/modules/nanolyse/test.yml index 4938fe57..5af2e65e 100644 --- a/tests/modules/nanolyse/test.yml +++ b/tests/modules/nanolyse/test.yml @@ -1,5 +1,5 @@ - name: nanolyse - command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanolyse -entry test_nanolyse -c ./tests/config/nextflow.config -c ./tests/modules/nanolyse/nextflow.config tags: - nanolyse files: diff --git a/tests/modules/nanoplot/main.nf b/tests/modules/nanoplot/main.nf index a483f5e2..04c923c2 100644 
--- a/tests/modules/nanoplot/main.nf +++ b/tests/modules/nanoplot/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NANOPLOT } from '../../../modules/nanoplot/main.nf' addParams( options: [:] ) +include { NANOPLOT } from '../../../modules/nanoplot/main.nf' workflow test_nanoplot_summary { def input = [] diff --git a/tests/modules/nanoplot/nextflow.config b/tests/modules/nanoplot/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nanoplot/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nanoplot/test.yml b/tests/modules/nanoplot/test.yml index 475b90c9..6549953e 100644 --- a/tests/modules/nanoplot/test.yml +++ b/tests/modules/nanoplot/test.yml @@ -1,6 +1,6 @@ - name: nanoplot_summary - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_summary -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: @@ -8,7 +8,7 @@ contains: - "report" - name: nanoplot_fastq - command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nanoplot -entry test_nanoplot_fastq -c ./tests/config/nextflow.config -c ./tests/modules/nanoplot/nextflow.config tags: - nanoplot files: diff --git a/tests/modules/ncbigenomedownload/main.nf b/tests/modules/ncbigenomedownload/main.nf new file mode 100644 index 00000000..2447b97c --- /dev/null +++ b/tests/modules/ncbigenomedownload/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NCBIGENOMEDOWNLOAD } from '../../../modules/ncbigenomedownload/main.nf' + +workflow test_ncbigenomedownload { + + input = [ [ id:'test', single_end:false ] ] + + accessions = [] + + NCBIGENOMEDOWNLOAD 
( input, accessions) +} + + diff --git a/tests/modules/ncbigenomedownload/nextflow.config b/tests/modules/ncbigenomedownload/nextflow.config new file mode 100644 index 00000000..7e6ccf70 --- /dev/null +++ b/tests/modules/ncbigenomedownload/nextflow.config @@ -0,0 +1,8 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: NCBIGENOMEDOWNLOAD { + ext.args = '-A GCF_000013425.1 --formats genbank,fasta,assembly-stats bacteria' + } +} diff --git a/tests/modules/ncbigenomedownload/test.yml b/tests/modules/ncbigenomedownload/test.yml new file mode 100644 index 00000000..8765e04f --- /dev/null +++ b/tests/modules/ncbigenomedownload/test.yml @@ -0,0 +1,11 @@ +- name: ncbigenomedownload test_ncbigenomedownload + command: nextflow run ./tests/modules/ncbigenomedownload -entry test_ncbigenomedownload -c ./tests/config/nextflow.config -c ./tests/modules/ncbigenomedownload/nextflow.config + tags: + - ncbigenomedownload + files: + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_assembly_stats.txt + md5sum: f78c6a373130e50fac5472962a5fdf44 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.fna.gz + md5sum: b086eb1020e7df022afa545dc6d93297 + - path: output/ncbigenomedownload/GCF_000013425.1_ASM1342v1_genomic.gbff.gz + md5sum: ae2da70e32c783858e6c60c72e9eeb7a diff --git a/tests/modules/nextclade/main.nf b/tests/modules/nextclade/main.nf index 93c50ca5..15750990 100755 --- a/tests/modules/nextclade/main.nf +++ b/tests/modules/nextclade/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { NEXTCLADE } from '../../../modules/nextclade/main.nf' addParams( options: [:] ) +include { NEXTCLADE } from '../../../modules/nextclade/main.nf' workflow test_nextclade { input = [ diff --git a/tests/modules/nextclade/nextflow.config b/tests/modules/nextclade/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nextclade/nextflow.config @@ -0,0 
+1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nextclade/test.yml b/tests/modules/nextclade/test.yml index 4d1d7743..36218aad 100755 --- a/tests/modules/nextclade/test.yml +++ b/tests/modules/nextclade/test.yml @@ -1,5 +1,5 @@ - name: nextclade test_nextclade - command: nextflow run tests/modules/nextclade -entry test_nextclade -c tests/config/nextflow.config + command: nextflow run ./tests/modules/nextclade -entry test_nextclade -c ./tests/config/nextflow.config -c ./tests/modules/nextclade/nextflow.config tags: - nextclade files: diff --git a/tests/modules/ngmaster/main.nf b/tests/modules/ngmaster/main.nf new file mode 100644 index 00000000..b23530bc --- /dev/null +++ b/tests/modules/ngmaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NGMASTER } from '../../../modules/ngmaster/main.nf' + +workflow test_ngmaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + NGMASTER ( input ) +} diff --git a/tests/modules/ngmaster/nextflow.config b/tests/modules/ngmaster/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ngmaster/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ngmaster/test.yml b/tests/modules/ngmaster/test.yml new file mode 100644 index 00000000..fb8dec82 --- /dev/null +++ b/tests/modules/ngmaster/test.yml @@ -0,0 +1,7 @@ +- name: ngmaster test_ngmaster + command: nextflow run ./tests/modules/ngmaster -entry test_ngmaster -c ./tests/config/nextflow.config -c ./tests/modules/ngmaster/nextflow.config + tags: + - ngmaster + files: + - path: output/ngmaster/test.tsv + md5sum: cf674474eaf8ac6abfcebce0af0226cf diff --git 
a/tests/modules/nucmer/main.nf b/tests/modules/nucmer/main.nf new file mode 100644 index 00000000..98e74b07 --- /dev/null +++ b/tests/modules/nucmer/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NUCMER } from '../../../modules/nucmer/main.nf' + +workflow test_nucmer { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + NUCMER ( input ) +} diff --git a/tests/modules/nucmer/nextflow.config b/tests/modules/nucmer/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/nucmer/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/nucmer/test.yml b/tests/modules/nucmer/test.yml new file mode 100644 index 00000000..62caced4 --- /dev/null +++ b/tests/modules/nucmer/test.yml @@ -0,0 +1,9 @@ +- name: nucmer test_nucmer + command: nextflow run ./tests/modules/nucmer -entry test_nucmer -c ./tests/config/nextflow.config -c ./tests/modules/nucmer/nextflow.config + tags: + - nucmer + files: + - path: output/nucmer/test.coords + contains: ['MT192765.1'] + - path: output/nucmer/test.delta + contains: ['MT192765.1'] diff --git a/tests/modules/optitype/main.nf b/tests/modules/optitype/main.nf index c27a5c99..55b46f0a 100644 --- a/tests/modules/optitype/main.nf +++ b/tests/modules/optitype/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { OPTITYPE } from '../../../modules/optitype/main.nf' addParams( options: ['args':'-e 1 -b 0.009', 'args2':'solver=glpk'] ) +include { OPTITYPE } from '../../../modules/optitype/main.nf' workflow test_optitype { input = [ [ id:'test', seq_type:'dna' ], // meta map diff --git a/tests/modules/optitype/nextflow.config 
b/tests/modules/optitype/nextflow.config new file mode 100644 index 00000000..14ad9e3f --- /dev/null +++ b/tests/modules/optitype/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: OPTITYPE { + ext.args = '-e 1 -b 0.009' + ext.args2 = 'solver=glpk' + } + +} diff --git a/tests/modules/optitype/test.yml b/tests/modules/optitype/test.yml index 41f35988..7c2ff0d0 100644 --- a/tests/modules/optitype/test.yml +++ b/tests/modules/optitype/test.yml @@ -1,5 +1,5 @@ - name: optitype test_optitype - command: nextflow run tests/modules/optitype -entry test_optitype -c tests/config/nextflow.config + command: nextflow run ./tests/modules/optitype -entry test_optitype -c ./tests/config/nextflow.config -c ./tests/modules/optitype/nextflow.config tags: - optitype files: diff --git a/tests/modules/pairix/main.nf b/tests/modules/pairix/main.nf index f1e2a44a..474bacbb 100644 --- a/tests/modules/pairix/main.nf +++ b/tests/modules/pairix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRIX } from '../../../modules/pairix/main.nf' addParams( options: [:] ) +include { PAIRIX } from '../../../modules/pairix/main.nf' workflow test_pairix { diff --git a/tests/modules/pairix/nextflow.config b/tests/modules/pairix/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairix/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairix/test.yml b/tests/modules/pairix/test.yml index 304a94b3..4cd9d37d 100644 --- a/tests/modules/pairix/test.yml +++ b/tests/modules/pairix/test.yml @@ -1,5 +1,5 @@ - name: pairix test_pairix - command: nextflow run tests/modules/pairix -entry test_pairix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairix -entry test_pairix -c 
./tests/config/nextflow.config -c ./tests/modules/pairix/nextflow.config tags: - pairix files: diff --git a/tests/modules/pairtools/dedup/main.nf b/tests/modules/pairtools/dedup/main.nf index 2c10c85b..28121526 100644 --- a/tests/modules/pairtools/dedup/main.nf +++ b/tests/modules/pairtools/dedup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' addParams( options: ['suffix':'.dedup'] ) +include { PAIRTOOLS_DEDUP } from '../../../../modules/pairtools/dedup/main.nf' workflow test_pairtools_dedup { diff --git a/tests/modules/pairtools/dedup/nextflow.config b/tests/modules/pairtools/dedup/nextflow.config new file mode 100644 index 00000000..b47fab16 --- /dev/null +++ b/tests/modules/pairtools/dedup/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_DEDUP { + ext.prefix = { "${meta.id}.dedup" } + } + +} diff --git a/tests/modules/pairtools/dedup/test.yml b/tests/modules/pairtools/dedup/test.yml index 25fc51f7..6d7f99f4 100644 --- a/tests/modules/pairtools/dedup/test.yml +++ b/tests/modules/pairtools/dedup/test.yml @@ -1,5 +1,5 @@ - name: pairtools dedup test_pairtools_dedup - command: nextflow run tests/modules/pairtools/dedup -entry test_pairtools_dedup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/dedup -entry test_pairtools_dedup -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/dedup/nextflow.config tags: - pairtools/dedup - pairtools diff --git a/tests/modules/pairtools/flip/main.nf b/tests/modules/pairtools/flip/main.nf index ed980102..e4d740e2 100644 --- a/tests/modules/pairtools/flip/main.nf +++ b/tests/modules/pairtools/flip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_FLIP } from '../../../../modules/pairtools/flip/main.nf' addParams( options: [:] ) +include { PAIRTOOLS_FLIP } from 
'../../../../modules/pairtools/flip/main.nf' workflow test_pairtools_flip { diff --git a/tests/modules/pairtools/flip/nextflow.config b/tests/modules/pairtools/flip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pairtools/flip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pairtools/flip/test.yml b/tests/modules/pairtools/flip/test.yml index eeef6530..cec54976 100644 --- a/tests/modules/pairtools/flip/test.yml +++ b/tests/modules/pairtools/flip/test.yml @@ -1,5 +1,5 @@ - name: pairtools flip test_pairtools_flip - command: nextflow run tests/modules/pairtools/flip -entry test_pairtools_flip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/flip -entry test_pairtools_flip -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/flip/nextflow.config tags: - pairtools/flip - pairtools diff --git a/tests/modules/pairtools/parse/main.nf b/tests/modules/pairtools/parse/main.nf index 26ceaa4f..f006fd6a 100644 --- a/tests/modules/pairtools/parse/main.nf +++ b/tests/modules/pairtools/parse/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' addParams( options: ['suffix':'.raw'] ) +include { PAIRTOOLS_PARSE } from '../../../../modules/pairtools/parse/main.nf' workflow test_pairtools_parse { diff --git a/tests/modules/pairtools/parse/nextflow.config b/tests/modules/pairtools/parse/nextflow.config new file mode 100644 index 00000000..a5d3ef9d --- /dev/null +++ b/tests/modules/pairtools/parse/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_PARSE { + ext.prefix = { "${meta.id}.raw" } + } + +} diff --git a/tests/modules/pairtools/parse/test.yml 
b/tests/modules/pairtools/parse/test.yml index e5d18e01..cf01038c 100644 --- a/tests/modules/pairtools/parse/test.yml +++ b/tests/modules/pairtools/parse/test.yml @@ -1,5 +1,5 @@ - name: pairtools parse test_pairtools_parse - command: nextflow run tests/modules/pairtools/parse -entry test_pairtools_parse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/parse -entry test_pairtools_parse -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/parse/nextflow.config tags: - pairtools - pairtools/parse diff --git a/tests/modules/pairtools/restrict/main.nf b/tests/modules/pairtools/restrict/main.nf index f785ed88..ae7e328b 100644 --- a/tests/modules/pairtools/restrict/main.nf +++ b/tests/modules/pairtools/restrict/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' addParams( options: ['suffix':'.restrict'] ) +include { PAIRTOOLS_RESTRICT } from '../../../../modules/pairtools/restrict/main.nf' workflow test_pairtools_restrict { diff --git a/tests/modules/pairtools/restrict/nextflow.config b/tests/modules/pairtools/restrict/nextflow.config new file mode 100644 index 00000000..fa8217bc --- /dev/null +++ b/tests/modules/pairtools/restrict/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_RESTRICT { + ext.prefix = { "${meta.id}.restrict" } + } + +} diff --git a/tests/modules/pairtools/restrict/test.yml b/tests/modules/pairtools/restrict/test.yml index afc64930..484b3739 100644 --- a/tests/modules/pairtools/restrict/test.yml +++ b/tests/modules/pairtools/restrict/test.yml @@ -1,5 +1,5 @@ - name: pairtools restrict test_pairtools_restrict - command: nextflow run tests/modules/pairtools/restrict -entry test_pairtools_restrict -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/restrict -entry 
test_pairtools_restrict -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/restrict/nextflow.config tags: - pairtools/restrict - pairtools diff --git a/tests/modules/pairtools/select/main.nf b/tests/modules/pairtools/select/main.nf index 2efd29c7..ff65cd95 100644 --- a/tests/modules/pairtools/select/main.nf +++ b/tests/modules/pairtools/select/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' addParams( options: [args:"(pair_type == 'RU') or (pair_type == 'UR') or (pair_type == 'UU')"] ) +include { PAIRTOOLS_SELECT } from '../../../../modules/pairtools/select/main.nf' workflow test_pairtools_select { diff --git a/tests/modules/pairtools/select/nextflow.config b/tests/modules/pairtools/select/nextflow.config new file mode 100644 index 00000000..df33cd2e --- /dev/null +++ b/tests/modules/pairtools/select/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SELECT { + ext.args = "(pair_type == \'RU\') or (pair_type == \'UR\') or (pair_type == \'UU\')" + } + +} diff --git a/tests/modules/pairtools/select/test.yml b/tests/modules/pairtools/select/test.yml index adeb50c3..431e8366 100644 --- a/tests/modules/pairtools/select/test.yml +++ b/tests/modules/pairtools/select/test.yml @@ -1,5 +1,5 @@ - name: pairtools select test_pairtools_select - command: nextflow run tests/modules/pairtools/select -entry test_pairtools_select -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/select -entry test_pairtools_select -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/select/nextflow.config tags: - pairtools/select - pairtools diff --git a/tests/modules/pairtools/sort/main.nf b/tests/modules/pairtools/sort/main.nf index dfb505e0..0e484c76 100644 --- a/tests/modules/pairtools/sort/main.nf +++ b/tests/modules/pairtools/sort/main.nf @@ 
-2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' addParams( options: ['suffix':'.sorted'] ) +include { PAIRTOOLS_SORT } from '../../../../modules/pairtools/sort/main.nf' workflow test_pairtools_sort { diff --git a/tests/modules/pairtools/sort/nextflow.config b/tests/modules/pairtools/sort/nextflow.config new file mode 100644 index 00000000..dfaf6053 --- /dev/null +++ b/tests/modules/pairtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PAIRTOOLS_SORT { + ext.prefix = { "${meta.id}.sorted" } + } + +} diff --git a/tests/modules/pairtools/sort/test.yml b/tests/modules/pairtools/sort/test.yml index 9eea74a0..4d4866aa 100644 --- a/tests/modules/pairtools/sort/test.yml +++ b/tests/modules/pairtools/sort/test.yml @@ -1,5 +1,5 @@ - name: pairtools sort test_pairtools_sort - command: nextflow run tests/modules/pairtools/sort -entry test_pairtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pairtools/sort -entry test_pairtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/pairtools/sort/nextflow.config tags: - pairtools/sort - pairtools diff --git a/tests/modules/pangolin/main.nf b/tests/modules/pangolin/main.nf index b8130c5d..ab4aa4af 100644 --- a/tests/modules/pangolin/main.nf +++ b/tests/modules/pangolin/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PANGOLIN } from '../../../modules/pangolin/main.nf' addParams( options: [:] ) +include { PANGOLIN } from '../../../modules/pangolin/main.nf' workflow test_pangolin { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/pangolin/nextflow.config b/tests/modules/pangolin/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pangolin/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pangolin/test.yml b/tests/modules/pangolin/test.yml index 5fb5e79e..c77e4912 100644 --- a/tests/modules/pangolin/test.yml +++ b/tests/modules/pangolin/test.yml @@ -1,5 +1,5 @@ - name: pangolin - command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/pangolin -entry test_pangolin -c ./tests/config/nextflow.config -c ./tests/modules/pangolin/nextflow.config tags: - pangolin files: diff --git a/tests/modules/paraclu/main.nf b/tests/modules/paraclu/main.nf index f5101591..3bd75dc0 100644 --- a/tests/modules/paraclu/main.nf +++ b/tests/modules/paraclu/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PARACLU } from '../../../modules/paraclu/main.nf' addParams( options: [:] ) +include { PARACLU } from '../../../modules/paraclu/main.nf' workflow test_paraclu { diff --git a/tests/modules/paraclu/nextflow.config b/tests/modules/paraclu/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/paraclu/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/paraclu/test.yml b/tests/modules/paraclu/test.yml index 3aa3e8b4..36b37df5 100644 --- a/tests/modules/paraclu/test.yml +++ b/tests/modules/paraclu/test.yml @@ -1,5 +1,5 @@ - name: paraclu test_paraclu - command: nextflow run tests/modules/paraclu -entry test_paraclu -c tests/config/nextflow.config + command: nextflow run ./tests/modules/paraclu -entry test_paraclu -c ./tests/config/nextflow.config -c ./tests/modules/paraclu/nextflow.config tags: - paraclu files: diff --git a/tests/modules/pbbam/pbmerge/main.nf b/tests/modules/pbbam/pbmerge/main.nf index 9220af0c..34ed33a6 100644 --- a/tests/modules/pbbam/pbmerge/main.nf +++ 
b/tests/modules/pbbam/pbmerge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' addParams( options: [suffix: '.merged'] ) +include { PBBAM_PBMERGE } from '../../../../modules/pbbam/pbmerge/main.nf' workflow test_pbbam_pbmerge { diff --git a/tests/modules/pbbam/pbmerge/nextflow.config b/tests/modules/pbbam/pbmerge/nextflow.config new file mode 100644 index 00000000..4fc270a9 --- /dev/null +++ b/tests/modules/pbbam/pbmerge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBBAM_PBMERGE { + ext.prefix = { "${meta.id}.merged" } + } + +} diff --git a/tests/modules/pbbam/pbmerge/test.yml b/tests/modules/pbbam/pbmerge/test.yml index 4f334c0e..0a6d7da3 100644 --- a/tests/modules/pbbam/pbmerge/test.yml +++ b/tests/modules/pbbam/pbmerge/test.yml @@ -1,5 +1,5 @@ - name: pbbam pbmerge test_pbbam_pbmerge - command: nextflow run tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbbam/pbmerge -entry test_pbbam_pbmerge -c ./tests/config/nextflow.config -c ./tests/modules/pbbam/pbmerge/nextflow.config tags: - pbbam/pbmerge - pbbam diff --git a/tests/modules/pbccs/main.nf b/tests/modules/pbccs/main.nf index 74c1b864..91a2ab30 100644 --- a/tests/modules/pbccs/main.nf +++ b/tests/modules/pbccs/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PBCCS } from '../../../modules/pbccs/main.nf' addParams( options: [args:'--min-rq 0.9'] ) +include { PBCCS } from '../../../modules/pbccs/main.nf' workflow test_pbccs { diff --git a/tests/modules/pbccs/nextflow.config b/tests/modules/pbccs/nextflow.config new file mode 100644 index 00000000..869909ce --- /dev/null +++ b/tests/modules/pbccs/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PBCCS { + ext.args = '--min-rq 0.9' + } + +} diff --git a/tests/modules/pbccs/test.yml b/tests/modules/pbccs/test.yml index 1d1651c4..5d481923 100644 --- a/tests/modules/pbccs/test.yml +++ b/tests/modules/pbccs/test.yml @@ -1,15 +1,15 @@ - name: pbccs test_pbccs - command: nextflow run tests/modules/pbccs -entry test_pbccs -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pbccs -entry test_pbccs -c ./tests/config/nextflow.config -c ./tests/modules/pbccs/nextflow.config tags: - pbccs files: - path: output/pbccs/alz.chunk2.bam - md5sum: d1a0266d5df64b74409a21981071a1c6 + md5sum: 2b6451f2d0454eb08359cb84e2e4069c - path: output/pbccs/alz.chunk2.bam.pbi - md5sum: 582a4500ddcb3f4a24a443192620d039 - - path: output/pbccs/alz.metrics.json.gz + md5sum: 3112cda9744e3facbf38245d41aaf080 + - path: output/pbccs/alz.chunk2.metrics.json.gz contains: [ 'zmws' ] - - path: output/pbccs/alz.report.json + - path: output/pbccs/alz.chunk2.report.json contains: [ 'Created by pbcopper' ] - - path: output/pbccs/alz.report.txt + - path: output/pbccs/alz.chunk2.report.txt md5sum: bbc5bd7a1269345cf7a7f3d4c746024b diff --git a/tests/modules/peddy/main.nf b/tests/modules/peddy/main.nf new file mode 100644 index 00000000..e53e8152 --- /dev/null +++ b/tests/modules/peddy/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PEDDY } from '../../../modules/peddy/main.nf' + +workflow test_peddy { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz_tbi'], checkIfExists: true) + ] + ped = file(params.test_data['homo_sapiens']['genome']['justhusky_ped'], checkIfExists: true) + + PEDDY ( input, ped ) +} diff --git a/tests/modules/peddy/nextflow.config 
b/tests/modules/peddy/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/peddy/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/peddy/test.yml b/tests/modules/peddy/test.yml new file mode 100644 index 00000000..0ed6dc94 --- /dev/null +++ b/tests/modules/peddy/test.yml @@ -0,0 +1,17 @@ +- name: peddy test_peddy + command: nextflow run ./tests/modules/peddy -entry test_peddy -c ./tests/config/nextflow.config -c ./tests/modules/peddy/nextflow.config + tags: + - peddy + files: + - path: output/peddy/justhusky_minimal.het_check.csv + md5sum: f4006d47355f2a760e40215b403926c3 + - path: output/peddy/justhusky_minimal.html + md5sum: 4f189cdbe8f03fe5c32d343c183506a5 + - path: output/peddy/justhusky_minimal.ped_check.csv + md5sum: d79a98558e280afe794d1374d2b985d4 + - path: output/peddy/justhusky_minimal.ped_check.rel-difference.csv + md5sum: 9de7e287cb30c742db2ff3622b0e63b1 + - path: output/peddy/justhusky_minimal.sex_check.csv + md5sum: 60848489bc697490da6a53b5170baf3b + - path: output/peddy/justhusky_minimal.vs.html + md5sum: 20f5f3a97fa781057c876ac79e044010 diff --git a/tests/modules/phyloflash/main.nf b/tests/modules/phyloflash/main.nf new file mode 100644 index 00000000..412e0321 --- /dev/null +++ b/tests/modules/phyloflash/main.nf @@ -0,0 +1,44 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PHYLOFLASH } from '../../../modules/phyloflash/main.nf' + +process STUB_PHYLOFLASH_DATABASE { + output: + path "ref" , emit: silva_db + path "UniVec" , emit: univec_db + + stub: + """ + mkdir ref + touch UniVec + """ +} + +workflow test_phyloflash_single_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + PHYLOFLASH ( input,
STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} + +workflow test_phyloflash_paired_end { + + STUB_PHYLOFLASH_DATABASE () + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + PHYLOFLASH ( input, STUB_PHYLOFLASH_DATABASE.out.silva_db, STUB_PHYLOFLASH_DATABASE.out.univec_db ) +} diff --git a/tests/modules/phyloflash/nextflow.config b/tests/modules/phyloflash/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/phyloflash/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/phyloflash/test.yml b/tests/modules/phyloflash/test.yml new file mode 100644 index 00000000..81eac2f2 --- /dev/null +++ b/tests/modules/phyloflash/test.yml @@ -0,0 +1,15 @@ +- name: phyloflash single-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_single_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e + +- name: phyloflash paired-end + command: nextflow run ./tests/modules/phyloflash -entry test_phyloflash_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/phyloflash/nextflow.config -stub-run + tags: + - phyloflash + files: + - path: output/phyloflash/test/test.SSU.collection.fasta + md5sum: d41d8cd98f00b204e9800998ecf8427e diff --git a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf new file mode 100644 index 00000000..2e8727b5 --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow 
+ +nextflow.enable.dsl = 2 + +include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsmetrics/main.nf' + +workflow test_picard_collecthsmetrics { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + bait_intervals = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) + target_intervals = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) + + PICARD_COLLECTHSMETRICS ( input, fasta, fai, bait_intervals, target_intervals ) +} diff --git a/tests/modules/picard/collecthsmetrics/nextflow.config b/tests/modules/picard/collecthsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml new file mode 100644 index 00000000..9232d508 --- /dev/null +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -0,0 +1,8 @@ +- name: picard collecthsmetrics test_picard_collecthsmetrics + command: nextflow run ./tests/modules/picard/collecthsmetrics -entry test_picard_collecthsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collecthsmetrics/nextflow.config + tags: + - picard + - picard/collecthsmetrics + files: + # The file can't be md5'd consistently + - path: output/picard/test_collecthsmetrics.txt diff --git a/tests/modules/picard/collectmultiplemetrics/main.nf b/tests/modules/picard/collectmultiplemetrics/main.nf index 73ac0013..453ecc91 100644 --- 
a/tests/modules/picard/collectmultiplemetrics/main.nf +++ b/tests/modules/picard/collectmultiplemetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../../modules/picard/collectmultiplemetrics/main.nf' workflow test_picard_collectmultiplemetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectmultiplemetrics/nextflow.config b/tests/modules/picard/collectmultiplemetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectmultiplemetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectmultiplemetrics/test.yml b/tests/modules/picard/collectmultiplemetrics/test.yml index fc4d0347..8fecca73 100644 --- a/tests/modules/picard/collectmultiplemetrics/test.yml +++ b/tests/modules/picard/collectmultiplemetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectmultiplemetrics - command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectmultiplemetrics -entry test_picard_collectmultiplemetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectmultiplemetrics/nextflow.config tags: - picard - picard/collectmultiplemetrics diff --git a/tests/modules/picard/collectwgsmetrics/main.nf b/tests/modules/picard/collectwgsmetrics/main.nf index 5bdf17ab..1d75a2bd 100644 --- a/tests/modules/picard/collectwgsmetrics/main.nf +++ b/tests/modules/picard/collectwgsmetrics/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_COLLECTWGSMETRICS } from 
'../../../../modules/picard/collectwgsmetrics/main.nf' addParams( options: [:] ) +include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgsmetrics/main.nf' workflow test_picard_collectwgsmetrics { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/collectwgsmetrics/nextflow.config b/tests/modules/picard/collectwgsmetrics/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/collectwgsmetrics/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/collectwgsmetrics/test.yml b/tests/modules/picard/collectwgsmetrics/test.yml index 62e87e65..2daef406 100644 --- a/tests/modules/picard/collectwgsmetrics/test.yml +++ b/tests/modules/picard/collectwgsmetrics/test.yml @@ -1,5 +1,5 @@ - name: picard collectwgsmetrics test_picard_collectwgsmetrics - command: nextflow run tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/collectwgsmetrics -entry test_picard_collectwgsmetrics -c ./tests/config/nextflow.config -c ./tests/modules/picard/collectwgsmetrics/nextflow.config tags: - picard/collectwgsmetrics - picard diff --git a/tests/modules/picard/filtersamreads/main.nf b/tests/modules/picard/filtersamreads/main.nf index a03471dd..847bee57 100644 --- a/tests/modules/picard/filtersamreads/main.nf +++ b/tests/modules/picard/filtersamreads/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) -include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' addParams( options: [suffix:'.filtered'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' +include { PICARD_FILTERSAMREADS } 
from '../../../../modules/picard/filtersamreads/main.nf' workflow test_picard_filtersamreads { diff --git a/tests/modules/picard/filtersamreads/nextflow.config b/tests/modules/picard/filtersamreads/nextflow.config new file mode 100644 index 00000000..653e9633 --- /dev/null +++ b/tests/modules/picard/filtersamreads/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.prefix = { "${meta.id}.sorted" } + } + + withName: PICARD_FILTERSAMREADS { + ext.prefix = { "${meta.id}.filtered" } + } + +} diff --git a/tests/modules/picard/filtersamreads/test.yml b/tests/modules/picard/filtersamreads/test.yml index e8e73ed0..a0ab712b 100644 --- a/tests/modules/picard/filtersamreads/test.yml +++ b/tests/modules/picard/filtersamreads/test.yml @@ -1,5 +1,5 @@ - name: picard filtersamreads - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads @@ -9,7 +9,7 @@ - name: picard filtersamreads readlist - command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c ./tests/config/nextflow.config -c ./tests/modules/picard/filtersamreads/nextflow.config tags: - picard - picard/filtersamreads diff --git a/tests/modules/picard/markduplicates/main.nf b/tests/modules/picard/markduplicates/main.nf index 78643f8b..12f3ac26 100644 --- a/tests/modules/picard/markduplicates/main.nf +++ b/tests/modules/picard/markduplicates/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { 
PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [:] ) -include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' addParams( options: [args : '--ASSUME_SORT_ORDER queryname' ] ) +include { PICARD_MARKDUPLICATES } from '../../../../modules/picard/markduplicates/main.nf' +include { PICARD_MARKDUPLICATES as PICARD_MARKDUPLICATES_UNSORTED} from '../../../../modules/picard/markduplicates/main.nf' workflow test_picard_markduplicates_sorted_bam { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/markduplicates/nextflow.config b/tests/modules/picard/markduplicates/nextflow.config new file mode 100644 index 00000000..9178c5b1 --- /dev/null +++ b/tests/modules/picard/markduplicates/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_MARKDUPLICATES_UNSORTED { + ext.args = 'ASSUME_SORT_ORDER=queryname' + } + +} diff --git a/tests/modules/picard/markduplicates/test.yml b/tests/modules/picard/markduplicates/test.yml index 04075548..beb54009 100644 --- a/tests/modules/picard/markduplicates/test.yml +++ b/tests/modules/picard/markduplicates/test.yml @@ -1,19 +1,21 @@ - name: picard markduplicates sorted bam - command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_sorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates files: - path: ./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: b520ccdc3a9edf3c6a314983752881f2 - name: picard markduplicates unsorted bam - command: nextflow run 
./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/markduplicates -entry test_picard_markduplicates_unsorted_bam -c ./tests/config/nextflow.config -c ./tests/modules/picard/markduplicates/nextflow.config tags: - picard - picard/markduplicates files: - path: ./output/picard/test.MarkDuplicates.metrics.txt + contains: + - "1.0 97 97" - path: ./output/picard/test.bam - md5sum: 46a6fc76048ba801d328f869ac9db020 diff --git a/tests/modules/picard/mergesamfiles/main.nf b/tests/modules/picard/mergesamfiles/main.nf index 5ddc849f..51c070b6 100644 --- a/tests/modules/picard/mergesamfiles/main.nf +++ b/tests/modules/picard/mergesamfiles/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' addParams( options: [:] ) +include { PICARD_MERGESAMFILES } from '../../../../modules/picard/mergesamfiles/main.nf' workflow test_picard_mergesamfiles { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/picard/mergesamfiles/nextflow.config b/tests/modules/picard/mergesamfiles/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/mergesamfiles/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/mergesamfiles/test.yml b/tests/modules/picard/mergesamfiles/test.yml index a331c96f..1cf59cb7 100644 --- a/tests/modules/picard/mergesamfiles/test.yml +++ b/tests/modules/picard/mergesamfiles/test.yml @@ -1,5 +1,5 @@ - name: picard mergesamfiles - command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/mergesamfiles -entry test_picard_mergesamfiles -c ./tests/config/nextflow.config -c 
./tests/modules/picard/mergesamfiles/nextflow.config tags: - picard - picard/mergesamfiles diff --git a/tests/modules/picard/sortsam/main.nf b/tests/modules/picard/sortsam/main.nf index 0130fad6..1516682c 100644 --- a/tests/modules/picard/sortsam/main.nf +++ b/tests/modules/picard/sortsam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] ) +include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' workflow test_picard_sortsam { diff --git a/tests/modules/picard/sortsam/nextflow.config b/tests/modules/picard/sortsam/nextflow.config new file mode 100644 index 00000000..ca572c2f --- /dev/null +++ b/tests/modules/picard/sortsam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PICARD_SORTSAM { + ext.prefix = { "${meta.id}.sorted" } + } + +} diff --git a/tests/modules/picard/sortsam/test.yml b/tests/modules/picard/sortsam/test.yml index 4443228e..61521850 100644 --- a/tests/modules/picard/sortsam/test.yml +++ b/tests/modules/picard/sortsam/test.yml @@ -1,5 +1,5 @@ - name: picard sortsam - command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/picard/sortsam -entry test_picard_sortsam -c ./tests/config/nextflow.config -c ./tests/modules/picard/sortsam/nextflow.config tags: - picard - picard/sortsam diff --git a/tests/modules/pirate/main.nf b/tests/modules/pirate/main.nf index 5957b1e6..05e5bdd8 100644 --- a/tests/modules/pirate/main.nf +++ b/tests/modules/pirate/main.nf @@ -2,15 +2,22 @@ nextflow.enable.dsl = 2 -include { PIRATE } from '../../../modules/pirate/main.nf' addParams( options: [:] ) +include { PIRATE } from '../../../modules/pirate/main.nf' workflow test_pirate { - - input = [ [ id:'test', single_end:false ], // meta map - 
[ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] + // [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), + // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + // ] PIRATE ( input ) } diff --git a/tests/modules/pirate/nextflow.config b/tests/modules/pirate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pirate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pirate/test.yml b/tests/modules/pirate/test.yml index d8c4d0c4..b8d36b95 100644 --- a/tests/modules/pirate/test.yml +++ b/tests/modules/pirate/test.yml @@ -1,5 +1,5 @@ - name: pirate test_pirate - command: nextflow run tests/modules/pirate -entry test_pirate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pirate -entry test_pirate -c ./tests/config/nextflow.config -c ./tests/modules/pirate/nextflow.config tags: - pirate files: diff --git 
a/tests/modules/plasmidid/main.nf b/tests/modules/plasmidid/main.nf index 1dd57daf..52d25a91 100644 --- a/tests/modules/plasmidid/main.nf +++ b/tests/modules/plasmidid/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLASMIDID } from '../../../modules/plasmidid/main.nf' addParams ( options: ['args' : '-k 0.8'] ) +include { PLASMIDID } from '../../../modules/plasmidid/main.nf' workflow test_plasmidid { diff --git a/tests/modules/plasmidid/nextflow.config b/tests/modules/plasmidid/nextflow.config new file mode 100644 index 00000000..2090bfae --- /dev/null +++ b/tests/modules/plasmidid/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLASMIDID { + ext.args = '-k 0.8' + } + +} diff --git a/tests/modules/plasmidid/test.yml b/tests/modules/plasmidid/test.yml index 838af394..cd0528cb 100644 --- a/tests/modules/plasmidid/test.yml +++ b/tests/modules/plasmidid/test.yml @@ -1,5 +1,5 @@ - name: plasmidid - command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plasmidid -entry test_plasmidid -c ./tests/config/nextflow.config -c ./tests/modules/plasmidid/nextflow.config tags: - plasmidid files: diff --git a/tests/modules/plink/extract/main.nf b/tests/modules/plink/extract/main.nf new file mode 100644 index 00000000..6beb0469 --- /dev/null +++ b/tests/modules/plink/extract/main.nf @@ -0,0 +1,29 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' +include { PLINK_EXTRACT } from '../../../../modules/plink/extract/main.nf' + +workflow test_plink_extract { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + + PLINK_VCF ( input ) + + PLINK_VCF.out.bim + .splitText(file: 'variants.keep', 
keepHeader: false, by: 10) + .first() + .set { ch_variants } + + PLINK_VCF.out.bed + .concat(PLINK_VCF.out.bim, PLINK_VCF.out.fam.concat(ch_variants)) + .groupTuple() + .map{ meta, paths -> [meta, paths[0], paths[1], paths[2], paths[3]] } + .set { ch_extract } + + PLINK_EXTRACT ( ch_extract ) +} diff --git a/tests/modules/plink/extract/nextflow.config b/tests/modules/plink/extract/nextflow.config new file mode 100644 index 00000000..6a7f6d42 --- /dev/null +++ b/tests/modules/plink/extract/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = '--make-bed --set-missing-var-ids @:#:\\$1:\\$2' + } + + withName: PLINK_EXTRACT { + ext.prefix = { "${meta.id}.extract" } + } + +} diff --git a/tests/modules/plink/extract/test.yml b/tests/modules/plink/extract/test.yml new file mode 100644 index 00000000..87cf82cc --- /dev/null +++ b/tests/modules/plink/extract/test.yml @@ -0,0 +1,18 @@ +- name: plink extract test_plink_extract + command: nextflow run ./tests/modules/plink/extract -entry test_plink_extract -c ./tests/config/nextflow.config -c ./tests/modules/plink/extract/nextflow.config + tags: + - plink + - plink/extract + files: + - path: output/plink/test.bed + md5sum: 9121010aba9905eee965e96bc983611d + - path: output/plink/test.bim + md5sum: 510ec606219ee5daaf5c207cb01554bf + - path: output/plink/test.extract.bed + md5sum: 9e02f7143bcc756a51f20d50ca7f8032 + - path: output/plink/test.extract.bim + md5sum: 63d190aea4094aa5d042aacd63397f94 + - path: output/plink/test.extract.fam + md5sum: c499456df4da78792ef29934ef3cd47d + - path: output/plink/test.fam + md5sum: c499456df4da78792ef29934ef3cd47d diff --git a/tests/modules/plink/vcf/main.nf b/tests/modules/plink/vcf/main.nf index 096bacdd..4dac8978 100644 --- a/tests/modules/plink/vcf/main.nf +++ b/tests/modules/plink/vcf/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PLINK_VCF } 
from '../../../../modules/plink/vcf/main.nf' addParams( options: ['args':" --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr"]) +include { PLINK_VCF } from '../../../../modules/plink/vcf/main.nf' workflow test_plink_vcf { diff --git a/tests/modules/plink/vcf/nextflow.config b/tests/modules/plink/vcf/nextflow.config new file mode 100644 index 00000000..f0b72c8d --- /dev/null +++ b/tests/modules/plink/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK_VCF { + ext.args = ' --make-bed --biallelic-only strict --vcf-half-call missing --double-id --recode ped --id-delim \'=\' --allow-extra-chr' + } + +} diff --git a/tests/modules/plink/vcf/test.yml b/tests/modules/plink/vcf/test.yml index bfd54386..9042d14a 100644 --- a/tests/modules/plink/vcf/test.yml +++ b/tests/modules/plink/vcf/test.yml @@ -1,5 +1,5 @@ - name: plink vcf test_plink_vcf - command: nextflow run tests/modules/plink/vcf -entry test_plink_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/plink/vcf -entry test_plink_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink/vcf/nextflow.config tags: - plink - plink/vcf diff --git a/tests/modules/plink2/vcf/main.nf b/tests/modules/plink2/vcf/main.nf new file mode 100644 index 00000000..08d7dc61 --- /dev/null +++ b/tests/modules/plink2/vcf/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' + +workflow test_plink2_vcf { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + + PLINK2_VCF ( input ) +} diff --git a/tests/modules/plink2/vcf/nextflow.config b/tests/modules/plink2/vcf/nextflow.config new file mode 100644 index 00000000..7f7e5e77 --- /dev/null +++ 
b/tests/modules/plink2/vcf/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PLINK2_VCF { + ext.args = '--allow-extra-chr' + } + +} diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml new file mode 100644 index 00000000..52f58a42 --- /dev/null +++ b/tests/modules/plink2/vcf/test.yml @@ -0,0 +1,12 @@ +- name: plink2 vcf test_plink2_vcf + command: nextflow run ./tests/modules/plink2/vcf -entry test_plink2_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink2/vcf/nextflow.config + tags: + - plink2/vcf + - plink2 + files: + - path: output/plink2/test.pgen + md5sum: d66d3cd4a6c9cca1a4073d7f4b277041 + - path: output/plink2/test.psam + md5sum: dc3b77d7753a7bed41734323e3549b10 + - path: output/plink2/test.pvar + md5sum: d61e53f847a6335138b584216b4e45d0 diff --git a/tests/modules/pmdtools/filter/main.nf b/tests/modules/pmdtools/filter/main.nf new file mode 100644 index 00000000..f1b2b4d3 --- /dev/null +++ b/tests/modules/pmdtools/filter/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PMDTOOLS_FILTER } from '../../../../modules/pmdtools/filter/main.nf' + +workflow test_pmdtools_filter { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]] + threshold = 3 + reference = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + PMDTOOLS_FILTER ( input, threshold, reference ) +} diff --git a/tests/modules/pmdtools/filter/nextflow.config b/tests/modules/pmdtools/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pmdtools/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + 
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pmdtools/filter/test.yml b/tests/modules/pmdtools/filter/test.yml new file mode 100644 index 00000000..a7ebefbe --- /dev/null +++ b/tests/modules/pmdtools/filter/test.yml @@ -0,0 +1,8 @@ +- name: pmdtools filter + command: nextflow run ./tests/modules/pmdtools/filter -entry test_pmdtools_filter -c ./tests/config/nextflow.config -c ./tests/modules/pmdtools/filter/nextflow.config + tags: + - pmdtools + - pmdtools/filter + files: + - path: output/pmdtools/test.bam + md5sum: 0fa64cb87d0439d4482938a4b6990b9d diff --git a/tests/modules/porechop/main.nf b/tests/modules/porechop/main.nf index b6d7bafa..f20b7a6e 100644 --- a/tests/modules/porechop/main.nf +++ b/tests/modules/porechop/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PORECHOP } from '../../../modules/porechop/main.nf' addParams( options: [args: '', suffix: '_porechop'] ) +include { PORECHOP } from '../../../modules/porechop/main.nf' workflow test_porechop { diff --git a/tests/modules/porechop/nextflow.config b/tests/modules/porechop/nextflow.config new file mode 100644 index 00000000..85eb257a --- /dev/null +++ b/tests/modules/porechop/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PORECHOP { + ext.args = '' + ext.prefix = { "${meta.id}_porechop" } + } + +} diff --git a/tests/modules/porechop/test.yml b/tests/modules/porechop/test.yml index b37a7ec4..8790ab87 100644 --- a/tests/modules/porechop/test.yml +++ b/tests/modules/porechop/test.yml @@ -1,5 +1,5 @@ - name: porechop test_porechop - command: nextflow run tests/modules/porechop -entry test_porechop -c tests/config/nextflow.config + command: nextflow run ./tests/modules/porechop -entry test_porechop -c ./tests/config/nextflow.config -c ./tests/modules/porechop/nextflow.config tags: - porechop files: 
diff --git a/tests/modules/preseq/lcextrap/main.nf b/tests/modules/preseq/lcextrap/main.nf index 390039bd..4bbbd146 100644 --- a/tests/modules/preseq/lcextrap/main.nf +++ b/tests/modules/preseq/lcextrap/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' addParams( options: [:] ) +include { PRESEQ_LCEXTRAP } from '../../../../modules/preseq/lcextrap/main.nf' // // Test with single-end data diff --git a/tests/modules/preseq/lcextrap/nextflow.config b/tests/modules/preseq/lcextrap/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/preseq/lcextrap/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/preseq/lcextrap/test.yml b/tests/modules/preseq/lcextrap/test.yml index 4472a485..ecd1d046 100644 --- a/tests/modules/preseq/lcextrap/test.yml +++ b/tests/modules/preseq/lcextrap/test.yml @@ -1,5 +1,5 @@ - name: preseq lcextrap single-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_single_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap @@ -9,7 +9,7 @@ - path: output/preseq/test.command.log - name: preseq lcextrap paired-end - command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/preseq/lcextrap -entry test_preseq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/lcextrap/nextflow.config tags: - preseq - preseq/lcextrap diff --git a/tests/modules/prodigal/main.nf b/tests/modules/prodigal/main.nf index 414585a1..6e282015 100644 --- a/tests/modules/prodigal/main.nf +++ 
b/tests/modules/prodigal/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PRODIGAL } from '../../../modules/prodigal/main.nf' addParams( options: [:] ) +include { PRODIGAL } from '../../../modules/prodigal/main.nf' workflow test_prodigal { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/prodigal/nextflow.config b/tests/modules/prodigal/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prodigal/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prodigal/test.yml b/tests/modules/prodigal/test.yml index 93caa998..7f0ab88c 100644 --- a/tests/modules/prodigal/test.yml +++ b/tests/modules/prodigal/test.yml @@ -1,5 +1,5 @@ - name: prodigal test_prodigal - command: nextflow run tests/modules/prodigal -entry test_prodigal -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prodigal -entry test_prodigal -c ./tests/config/nextflow.config -c ./tests/modules/prodigal/nextflow.config tags: - prodigal files: diff --git a/tests/modules/prokka/main.nf b/tests/modules/prokka/main.nf index e35cb1d9..97e94ca8 100644 --- a/tests/modules/prokka/main.nf +++ b/tests/modules/prokka/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PROKKA } from '../../../modules/prokka/main.nf' addParams( options: [:] ) +include { PROKKA } from '../../../modules/prokka/main.nf' workflow test_prokka { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/prokka/nextflow.config b/tests/modules/prokka/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/prokka/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/prokka/test.yml b/tests/modules/prokka/test.yml index 2823353c..92f813a7 
100644 --- a/tests/modules/prokka/test.yml +++ b/tests/modules/prokka/test.yml @@ -1,5 +1,5 @@ - name: prokka - command: nextflow run ./tests/modules/prokka -entry test_prokka -c tests/config/nextflow.config + command: nextflow run ./tests/modules/prokka -entry test_prokka -c ./tests/config/nextflow.config -c ./tests/modules/prokka/nextflow.config tags: - prokka files: diff --git a/tests/modules/pycoqc/main.nf b/tests/modules/pycoqc/main.nf index ab65dadc..c8a8ee2c 100644 --- a/tests/modules/pycoqc/main.nf +++ b/tests/modules/pycoqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYCOQC } from '../../../modules/pycoqc/main.nf' addParams ( options: ['args' : '--min_pass_qual 0'] ) +include { PYCOQC } from '../../../modules/pycoqc/main.nf' workflow test_pycoqc { diff --git a/tests/modules/pycoqc/nextflow.config b/tests/modules/pycoqc/nextflow.config new file mode 100644 index 00000000..d532f8f7 --- /dev/null +++ b/tests/modules/pycoqc/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PYCOQC { + ext.args = '--min_pass_qual 0' + } + +} diff --git a/tests/modules/pycoqc/test.yml b/tests/modules/pycoqc/test.yml index 052e3e1a..becd911b 100644 --- a/tests/modules/pycoqc/test.yml +++ b/tests/modules/pycoqc/test.yml @@ -1,5 +1,5 @@ - name: pycoqc - command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pycoqc -entry test_pycoqc -c ./tests/config/nextflow.config -c ./tests/modules/pycoqc/nextflow.config tags: - pycoqc files: diff --git a/tests/modules/pydamage/analyze/main.nf b/tests/modules/pydamage/analyze/main.nf index ddf0b27a..920a4201 100644 --- a/tests/modules/pydamage/analyze/main.nf +++ b/tests/modules/pydamage/analyze/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: 
[:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/analyze/nextflow.config b/tests/modules/pydamage/analyze/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/analyze/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pydamage/analyze/test.yml b/tests/modules/pydamage/analyze/test.yml index 157e947f..9d22f20e 100644 --- a/tests/modules/pydamage/analyze/test.yml +++ b/tests/modules/pydamage/analyze/test.yml @@ -1,5 +1,5 @@ - name: pydamage analyze test workflow - command: nextflow run tests/modules/pydamage/analyze -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pydamage/analyze -entry test_pydamage -c ./tests/config/nextflow.config -c ./tests/modules/pydamage/analyze/nextflow.config tags: - pydamage - pydamage/analyze diff --git a/tests/modules/pydamage/filter/main.nf b/tests/modules/pydamage/filter/main.nf index 03e90408..dac03e78 100644 --- a/tests/modules/pydamage/filter/main.nf +++ b/tests/modules/pydamage/filter/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' addParams( options: [:] ) -include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' addParams( options: [:] ) +include { PYDAMAGE_ANALYZE } from '../../../../modules/pydamage/analyze/main.nf' +include { PYDAMAGE_FILTER } from '../../../../modules/pydamage/filter/main.nf' workflow test_pydamage { diff --git a/tests/modules/pydamage/filter/nextflow.config b/tests/modules/pydamage/filter/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/pydamage/filter/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/pydamage/filter/test.yml b/tests/modules/pydamage/filter/test.yml index 248be44b..b6738e3d 100644 --- a/tests/modules/pydamage/filter/test.yml +++ b/tests/modules/pydamage/filter/test.yml @@ -1,5 +1,5 @@ - name: pydamage filter test workflow - command: nextflow run tests/modules/pydamage/filter -entry test_pydamage -c tests/config/nextflow.config + command: nextflow run ./tests/modules/pydamage/filter -entry test_pydamage -c ./tests/config/nextflow.config -c ./tests/modules/pydamage/filter/nextflow.config tags: - pydamage - pydamage/filter @@ -7,4 +7,4 @@ - path: output/pydamage/pydamage_results/pydamage_filtered_results.csv md5sum: 9f297233cf4932d7d7e52cc72d4727dc - path: output/pydamage/pydamage_results/pydamage_results.csv - md5sum: 6847e0d5aa6dba85bbd2dd509772b7a0 + md5sum: 37ee6b4dee6890fd2ec8550337f21ac9 diff --git a/tests/modules/qcat/main.nf b/tests/modules/qcat/main.nf index 72c87e37..8a5cdd6d 100644 --- a/tests/modules/qcat/main.nf +++ b/tests/modules/qcat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QCAT } from '../../../modules/qcat/main.nf' addParams( options: [:] ) +include { QCAT } from '../../../modules/qcat/main.nf' workflow test_qcat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/qcat/nextflow.config b/tests/modules/qcat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qcat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qcat/test.yml b/tests/modules/qcat/test.yml index 5c43841b..47ece983 100644 --- a/tests/modules/qcat/test.yml +++ b/tests/modules/qcat/test.yml @@ -1,5 +1,5 @@ - name: qcat - command: nextflow run ./tests/modules/qcat -entry test_qcat -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/qcat -entry test_qcat -c ./tests/config/nextflow.config -c ./tests/modules/qcat/nextflow.config tags: - qcat files: diff --git a/tests/modules/qualimap/bamqc/main.nf b/tests/modules/qualimap/bamqc/main.nf index 803d0220..a17efd59 100644 --- a/tests/modules/qualimap/bamqc/main.nf +++ b/tests/modules/qualimap/bamqc/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUALIMAP_BAMQC } from '../../../../modules/qualimap/bamqc/main.nf' addParams( options: [:] ) +include { QUALIMAP_BAMQC } from '../../../../modules/qualimap/bamqc/main.nf' workflow test_qualimap_bamqc { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/qualimap/bamqc/nextflow.config b/tests/modules/qualimap/bamqc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/qualimap/bamqc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/qualimap/bamqc/test.yml b/tests/modules/qualimap/bamqc/test.yml index 7d746a51..41c4199e 100644 --- a/tests/modules/qualimap/bamqc/test.yml +++ b/tests/modules/qualimap/bamqc/test.yml @@ -1,5 +1,5 @@ - name: qualimap bamqc test workflow - command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c tests/config/nextflow.config + command: nextflow run ./tests/modules/qualimap/bamqc -entry test_qualimap_bamqc -c ./tests/config/nextflow.config -c ./tests/modules/qualimap/bamqc/nextflow.config tags: - qualimap - qualimap/bamqc diff --git a/tests/modules/quast/main.nf b/tests/modules/quast/main.nf index d263470c..c879a8a9 100644 --- a/tests/modules/quast/main.nf +++ b/tests/modules/quast/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { QUAST } from '../../../modules/quast/main.nf' addParams(options: [:]) +include { QUAST } from '../../../modules/quast/main.nf' workflow test_quast_ref { fasta = 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/quast/nextflow.config b/tests/modules/quast/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/quast/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/quast/test.yml b/tests/modules/quast/test.yml index 6e1f991f..166cd896 100644 --- a/tests/modules/quast/test.yml +++ b/tests/modules/quast/test.yml @@ -1,5 +1,5 @@ - name: quast with reference - command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_ref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: @@ -82,7 +82,7 @@ - path: ./output/quast/quast/icarus_viewers/contig_size_viewer.html - name: quast without reference - command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config + command: nextflow run ./tests/modules/quast -entry test_quast_noref -c ./tests/config/nextflow.config -c ./tests/modules/quast/nextflow.config tags: - quast files: diff --git a/tests/modules/racon/main.nf b/tests/modules/racon/main.nf index b6b864e1..507d8d8d 100644 --- a/tests/modules/racon/main.nf +++ b/tests/modules/racon/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RACON } from '../../../modules/racon/main.nf' addParams( options: [:] ) +include { RACON } from '../../../modules/racon/main.nf' workflow test_racon { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/racon/nextflow.config b/tests/modules/racon/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/racon/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/racon/test.yml b/tests/modules/racon/test.yml index dc8e57dc..0250fa36 100644 --- a/tests/modules/racon/test.yml +++ b/tests/modules/racon/test.yml @@ -1,5 +1,5 @@ - name: racon test_racon - command: nextflow run tests/modules/racon -entry test_racon -c tests/config/nextflow.config + command: nextflow run ./tests/modules/racon -entry test_racon -c ./tests/config/nextflow.config -c ./tests/modules/racon/nextflow.config tags: - racon files: diff --git a/tests/modules/rapidnj/main.nf b/tests/modules/rapidnj/main.nf index e23fa46f..66d19c3c 100644 --- a/tests/modules/rapidnj/main.nf +++ b/tests/modules/rapidnj/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' addParams( options: [:] ) +include { RAPIDNJ } from '../../../modules/rapidnj/main.nf' workflow test_rapidnj { diff --git a/tests/modules/rapidnj/nextflow.config b/tests/modules/rapidnj/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rapidnj/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rapidnj/test.yml b/tests/modules/rapidnj/test.yml index 0b7ecff5..21f6ead9 100644 --- a/tests/modules/rapidnj/test.yml +++ b/tests/modules/rapidnj/test.yml @@ -1,5 +1,5 @@ - name: rapidnj - command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rapidnj -entry test_rapidnj -c ./tests/config/nextflow.config -c ./tests/modules/rapidnj/nextflow.config tags: - rapidnj files: diff --git a/tests/modules/rasusa/main.nf b/tests/modules/rasusa/main.nf index 9cc139ad..8a11627c 100644 --- a/tests/modules/rasusa/main.nf +++ b/tests/modules/rasusa/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RASUSA 
} from '../../../modules/rasusa/main.nf' addParams( options: ['suffix':'_100X']) +include { RASUSA } from '../../../modules/rasusa/main.nf' workflow test_rasusa { input = [ [ id:'test', single_end:false], // meta map diff --git a/tests/modules/rasusa/nextflow.config b/tests/modules/rasusa/nextflow.config new file mode 100644 index 00000000..50c32e5c --- /dev/null +++ b/tests/modules/rasusa/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RASUSA { + ext.prefix = { "${meta.id}_100X" } + } + +} diff --git a/tests/modules/rasusa/test.yml b/tests/modules/rasusa/test.yml index bb30c99e..41c56b67 100644 --- a/tests/modules/rasusa/test.yml +++ b/tests/modules/rasusa/test.yml @@ -1,5 +1,5 @@ - name: rasusa test_rasusa - command: nextflow run tests/modules/rasusa -entry test_rasusa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rasusa -entry test_rasusa -c ./tests/config/nextflow.config -c ./tests/modules/rasusa/nextflow.config tags: - rasusa files: diff --git a/tests/modules/raxmlng/main.nf b/tests/modules/raxmlng/main.nf index 2cac6b31..5fad6953 100644 --- a/tests/modules/raxmlng/main.nf +++ b/tests/modules/raxmlng/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--model GTR+G'] ) -include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' addParams( options: [args:'--all --model GTR+G --bs-trees 1000'] ) +include { RAXMLNG as RAXMLNG_NO_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' +include { RAXMLNG as RAXMLNG_BOOTSTRAP } from '../../../modules/raxmlng/main.nf' // // Test without bootstrapping diff --git a/tests/modules/raxmlng/nextflow.config b/tests/modules/raxmlng/nextflow.config new file mode 100644 index 00000000..8c269a9b --- /dev/null +++ b/tests/modules/raxmlng/nextflow.config @@ -0,0 +1,13 
@@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RAXMLNG_NO_BOOTSTRAP { + ext.args = '--model GTR+G' + } + + withName: RAXMLNG_BOOTSTRAP { + ext.args = '--all --model GTR+G --bs-trees 1000' + } + +} diff --git a/tests/modules/raxmlng/test.yml b/tests/modules/raxmlng/test.yml index 950c48ad..735b6a74 100644 --- a/tests/modules/raxmlng/test.yml +++ b/tests/modules/raxmlng/test.yml @@ -1,5 +1,5 @@ - name: raxmlng no_bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_no_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: @@ -11,7 +11,7 @@ - 'sample4:0.111' - name: raxmlng bootstrap - command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c tests/config/nextflow.config + command: nextflow run ./tests/modules/raxmlng -entry test_raxmlng_bootstrap -c ./tests/config/nextflow.config -c ./tests/modules/raxmlng/nextflow.config tags: - raxmlng files: diff --git a/tests/modules/rmarkdownnotebook/main.nf b/tests/modules/rmarkdownnotebook/main.nf index e56d54ff..fdb7d3b9 100644 --- a/tests/modules/rmarkdownnotebook/main.nf +++ b/tests/modules/rmarkdownnotebook/main.nf @@ -2,12 +2,8 @@ nextflow.enable.dsl = 2 -include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - parametrize: false, options: [:] -) -include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' addParams( - options: [:] -) +include { RMARKDOWNNOTEBOOK } from '../../../modules/rmarkdownnotebook/main.nf' +include { RMARKDOWNNOTEBOOK as RMARKDOWNNOTEBOOK_PARAMETRIZE } from '../../../modules/rmarkdownnotebook/main.nf' workflow test_rmarkdown { diff --git a/tests/modules/rmarkdownnotebook/nextflow.config 
b/tests/modules/rmarkdownnotebook/nextflow.config new file mode 100644 index 00000000..c99f5250 --- /dev/null +++ b/tests/modules/rmarkdownnotebook/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RMARKDOWNNOTEBOOK { + ext = ['parametrize': false] + } + + // this should be the default options, but need to work around + // https://github.com/nextflow-io/nextflow/issues/2422 + withName: RMARKDOWNNOTEBOOK_PARAMETRIZE { + ext = ['parametrize': true] + } + +} diff --git a/tests/modules/rmarkdownnotebook/test.yml b/tests/modules/rmarkdownnotebook/test.yml index bef6086a..3645514a 100644 --- a/tests/modules/rmarkdownnotebook/test.yml +++ b/tests/modules/rmarkdownnotebook/test.yml @@ -1,5 +1,5 @@ - name: rmarkdownnotebook test_rmarkdown - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: @@ -12,7 +12,7 @@ - "n_iter = 10" - name: rmarkdownnotebook test_rmarkdown_parametrize - command: nextflow run tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rmarkdownnotebook -entry test_rmarkdown_parametrize -c ./tests/config/nextflow.config -c ./tests/modules/rmarkdownnotebook/nextflow.config tags: - rmarkdownnotebook files: diff --git a/tests/modules/roary/main.nf b/tests/modules/roary/main.nf index a4a96d6e..3fae516c 100644 --- a/tests/modules/roary/main.nf +++ b/tests/modules/roary/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { ROARY } from '../../../modules/roary/main.nf' addParams( options: [:] ) +include { ROARY } from '../../../modules/roary/main.nf' workflow test_roary { - - input = [ [ id:'test', 
single_end:false ], // meta map - [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] ] ROARY ( input ) diff --git a/tests/modules/roary/nextflow.config b/tests/modules/roary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/roary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml index c8e8c33d..981ab51c 100644 --- a/tests/modules/roary/test.yml +++ b/tests/modules/roary/test.yml @@ -1,5 +1,5 @@ - name: roary test_roary - command: nextflow run tests/modules/roary -entry test_roary -c tests/config/nextflow.config + command: nextflow run ./tests/modules/roary -entry test_roary -c ./tests/config/nextflow.config -c ./tests/modules/roary/nextflow.config tags: - roary files: @@ -8,9 +8,9 @@ - path: output/roary/results/accessory.tab contains: ['FT'] - path: output/roary/results/accessory_binary_genes.fa - md5sum: 0baeea4947bf17a2bf29d43a44f0278f + md5sum: d4191cf748dd8016ad877857a034bef3 - path: output/roary/results/accessory_binary_genes.fa.newick - md5sum: b1f8c76ab231bd38b850c1f8d3c1584b + md5sum: d4a2a64e781263ca1b9b3a4bc9d3a6ea - path: 
output/roary/results/accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/blast_identity_frequency.Rtab @@ -20,7 +20,7 @@ - path: output/roary/results/core_accessory.header.embl contains: ['ID Genome standard; DNA; PRO; 1234 BP.'] - path: output/roary/results/core_accessory.tab - contains: ['FT /taxa="GCF_000292685 GCF_000298385 GCF_002849995"'] + contains: ['FT /taxa="test1 test2 test3"'] - path: output/roary/results/core_accessory_graph.dot contains: ['/* list of nodes */'] - path: output/roary/results/gene_presence_absence.Rtab diff --git a/tests/modules/rsem/calculateexpression/main.nf b/tests/modules/rsem/calculateexpression/main.nf index e7de83a4..9d6d3c5c 100644 --- a/tests/modules/rsem/calculateexpression/main.nf +++ b/tests/modules/rsem/calculateexpression/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' addParams(options: [args: "--star"]) -include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' addParams(options: [args: "--star --star-gzipped-read-file"]) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' +include { RSEM_CALCULATEEXPRESSION } from '../../../../modules/rsem/calculateexpression/main.nf' workflow test_rsem_calculateexpression { diff --git a/tests/modules/rsem/calculateexpression/nextflow.config b/tests/modules/rsem/calculateexpression/nextflow.config new file mode 100644 index 00000000..b17a1cf2 --- /dev/null +++ b/tests/modules/rsem/calculateexpression/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: RSEM_PREPAREREFERENCE { + ext.args = '--star' + } + + withName: RSEM_CALCULATEEXPRESSION { + ext.args = '--star --star-gzipped-read-file' + } + +} diff --git a/tests/modules/rsem/calculateexpression/test.yml 
b/tests/modules/rsem/calculateexpression/test.yml index ac0866ea..f19c3398 100644 --- a/tests/modules/rsem/calculateexpression/test.yml +++ b/tests/modules/rsem/calculateexpression/test.yml @@ -1,55 +1,55 @@ - name: rsem calculateexpression test_rsem_calculateexpression - command: nextflow run tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/calculateexpression -entry test_rsem_calculateexpression -c ./tests/config/nextflow.config -c ./tests/modules/rsem/calculateexpression/nextflow.config tags: - rsem - rsem/calculateexpression files: - - path: output/index/rsem/Genome + - path: output/rsem/rsem/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/rsem/Log.out - - path: output/index/rsem/SA + - path: output/rsem/rsem/Log.out + - path: output/rsem/rsem/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/rsem/SAindex + - path: output/rsem/rsem/SAindex md5sum: fd05c149960e72642a8d7c860528ae81 - - path: output/index/rsem/chrLength.txt + - path: output/rsem/rsem/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/rsem/chrName.txt + - path: output/rsem/rsem/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/rsem/chrNameLength.txt + - path: output/rsem/rsem/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/chrStart.txt + - path: output/rsem/rsem/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/rsem/exonGeTrInfo.tab + - path: output/rsem/rsem/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/rsem/exonInfo.tab + - path: output/rsem/rsem/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/rsem/geneInfo.tab + - path: output/rsem/rsem/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/rsem/genome.chrlist + - path: output/rsem/rsem/genome.chrlist 
md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/rsem/genome.fasta + - path: output/rsem/rsem/genome.fasta md5sum: f315020d899597c1b57e5fe9f60f4c3e - - path: output/index/rsem/genome.grp + - path: output/rsem/rsem/genome.grp md5sum: c2848a8b6d495956c11ec53efc1de67e - - path: output/index/rsem/genome.idx.fa + - path: output/rsem/rsem/genome.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.n2g.idx.fa + - path: output/rsem/rsem/genome.n2g.idx.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genome.seq + - path: output/rsem/rsem/genome.seq md5sum: 94da0c6b88c33e63c9a052a11f4f57c1 - - path: output/index/rsem/genome.ti + - path: output/rsem/rsem/genome.ti md5sum: c9e4ae8d4d13a504eec2acf1b8589a66 - - path: output/index/rsem/genome.transcripts.fa + - path: output/rsem/rsem/genome.transcripts.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - - path: output/index/rsem/genomeParameters.txt + - path: output/rsem/rsem/genomeParameters.txt md5sum: 2fe3a030e1706c3e8cd4df3818e6dd2f - - path: output/index/rsem/sjdbInfo.txt + - path: output/rsem/rsem/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/rsem/sjdbList.fromGTF.out.tab + - path: output/rsem/rsem/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/rsem/sjdbList.out.tab + - path: output/rsem/rsem/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/rsem/transcriptInfo.tab + - path: output/rsem/rsem/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/rsem/test.genes.results md5sum: c7ec226f76736ea805771e73553ae359 diff --git a/tests/modules/rsem/preparereference/main.nf b/tests/modules/rsem/preparereference/main.nf index 2d4a9053..8062737d 100644 --- a/tests/modules/rsem/preparereference/main.nf +++ b/tests/modules/rsem/preparereference/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEM_PREPAREREFERENCE } from 
'../../../../modules/rsem/preparereference/main.nf' addParams(options: [publish_dir:'rsem']) +include { RSEM_PREPAREREFERENCE } from '../../../../modules/rsem/preparereference/main.nf' workflow test_rsem_preparereference { diff --git a/tests/modules/rsem/preparereference/nextflow.config b/tests/modules/rsem/preparereference/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rsem/preparereference/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rsem/preparereference/test.yml b/tests/modules/rsem/preparereference/test.yml index 734a92b2..1f058bea 100644 --- a/tests/modules/rsem/preparereference/test.yml +++ b/tests/modules/rsem/preparereference/test.yml @@ -1,5 +1,5 @@ - name: rsem preparereference test_rsem_preparereference - command: nextflow run tests/modules/rsem/preparereference -entry test_rsem_preparereference -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rsem/preparereference -entry test_rsem_preparereference -c ./tests/config/nextflow.config -c ./tests/modules/rsem/preparereference/nextflow.config tags: - rsem - rsem/preparereference diff --git a/tests/modules/rseqc/bamstat/main.nf b/tests/modules/rseqc/bamstat/main.nf index c13e7f97..4c53a1af 100644 --- a/tests/modules/rseqc/bamstat/main.nf +++ b/tests/modules/rseqc/bamstat/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' addParams(options: [:]) +include { RSEQC_BAMSTAT } from '../../../../modules/rseqc/bamstat/main.nf' workflow test_rseqc_bamstat { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/bamstat/nextflow.config b/tests/modules/rseqc/bamstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/bamstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir 
= { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/bamstat/test.yml b/tests/modules/rseqc/bamstat/test.yml index 75d62672..4cb35d0c 100644 --- a/tests/modules/rseqc/bamstat/test.yml +++ b/tests/modules/rseqc/bamstat/test.yml @@ -1,5 +1,5 @@ - name: rseqc bamstat test_rseqc_bamstat - command: nextflow run tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/bamstat -entry test_rseqc_bamstat -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/bamstat/nextflow.config tags: - rseqc - rseqc/bamstat diff --git a/tests/modules/rseqc/inferexperiment/main.nf b/tests/modules/rseqc/inferexperiment/main.nf index ae8c53a9..6337063d 100644 --- a/tests/modules/rseqc/inferexperiment/main.nf +++ b/tests/modules/rseqc/inferexperiment/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' addParams(options: [:]) +include { RSEQC_INFEREXPERIMENT } from '../../../../modules/rseqc/inferexperiment/main.nf' workflow test_rseqc_inferexperiment { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/rseqc/inferexperiment/nextflow.config b/tests/modules/rseqc/inferexperiment/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/inferexperiment/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/inferexperiment/test.yml b/tests/modules/rseqc/inferexperiment/test.yml index 59d6f3d5..554f8317 100644 --- a/tests/modules/rseqc/inferexperiment/test.yml +++ b/tests/modules/rseqc/inferexperiment/test.yml @@ -1,5 +1,5 @@ - name: rseqc inferexperiment test_rseqc_inferexperiment - command: nextflow run tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment 
-c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/inferexperiment -entry test_rseqc_inferexperiment -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/inferexperiment/nextflow.config tags: - rseqc - rseqc/inferexperiment diff --git a/tests/modules/rseqc/innerdistance/main.nf b/tests/modules/rseqc/innerdistance/main.nf index 003e8a14..8cc0ec3e 100644 --- a/tests/modules/rseqc/innerdistance/main.nf +++ b/tests/modules/rseqc/innerdistance/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_INNERDISTANCE } from '../../../../modules/rseqc/innerdistance/main.nf' addParams(options: [:]) +include { RSEQC_INNERDISTANCE } from '../../../../modules/rseqc/innerdistance/main.nf' workflow test_rseqc_innerdistance { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/innerdistance/nextflow.config b/tests/modules/rseqc/innerdistance/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/innerdistance/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/innerdistance/test.yml b/tests/modules/rseqc/innerdistance/test.yml index b0ee0283..c0f480e9 100644 --- a/tests/modules/rseqc/innerdistance/test.yml +++ b/tests/modules/rseqc/innerdistance/test.yml @@ -1,5 +1,5 @@ - name: rseqc innerdistance test_rseqc_innerdistance - command: nextflow run tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/innerdistance -entry test_rseqc_innerdistance -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/innerdistance/nextflow.config tags: - rseqc - rseqc/innerdistance diff --git a/tests/modules/rseqc/junctionannotation/main.nf b/tests/modules/rseqc/junctionannotation/main.nf index a6913850..303dcd85 100644 --- 
a/tests/modules/rseqc/junctionannotation/main.nf +++ b/tests/modules/rseqc/junctionannotation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONANNOTATION } from '../../../../modules/rseqc/junctionannotation/main.nf' workflow test_rseqc_junctionannotation { input = [ diff --git a/tests/modules/rseqc/junctionannotation/nextflow.config b/tests/modules/rseqc/junctionannotation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/junctionannotation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionannotation/test.yml b/tests/modules/rseqc/junctionannotation/test.yml index 39326f67..f2020b10 100644 --- a/tests/modules/rseqc/junctionannotation/test.yml +++ b/tests/modules/rseqc/junctionannotation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionannotation test_rseqc_junctionannotation - command: nextflow run tests/modules/rseqc/junctionannotation -entry test_rseqc_junctionannotation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionannotation -entry test_rseqc_junctionannotation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionannotation/nextflow.config tags: - rseqc - rseqc/junctionannotation diff --git a/tests/modules/rseqc/junctionsaturation/main.nf b/tests/modules/rseqc/junctionsaturation/main.nf index 047fb372..eefbb492 100644 --- a/tests/modules/rseqc/junctionsaturation/main.nf +++ b/tests/modules/rseqc/junctionsaturation/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_JUNCTIONSATURATION } from '../../../../modules/rseqc/junctionsaturation/main.nf' addParams(options: [:]) +include { RSEQC_JUNCTIONSATURATION } from 
'../../../../modules/rseqc/junctionsaturation/main.nf' workflow test_rseqc_junctionsaturation { input = [ diff --git a/tests/modules/rseqc/junctionsaturation/nextflow.config b/tests/modules/rseqc/junctionsaturation/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/junctionsaturation/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/junctionsaturation/test.yml b/tests/modules/rseqc/junctionsaturation/test.yml index dfadb371..db977360 100644 --- a/tests/modules/rseqc/junctionsaturation/test.yml +++ b/tests/modules/rseqc/junctionsaturation/test.yml @@ -1,5 +1,5 @@ - name: rseqc junctionsaturation test_rseqc_junctionsaturation - command: nextflow run tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/junctionsaturation -entry test_rseqc_junctionsaturation -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/junctionsaturation/nextflow.config tags: - rseqc/junctionsaturation - rseqc diff --git a/tests/modules/rseqc/readdistribution/main.nf b/tests/modules/rseqc/readdistribution/main.nf index 415aed9a..180367f2 100644 --- a/tests/modules/rseqc/readdistribution/main.nf +++ b/tests/modules/rseqc/readdistribution/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' addParams(options: [:]) +include { RSEQC_READDISTRIBUTION } from '../../../../modules/rseqc/readdistribution/main.nf' workflow test_rseqc_readdistribution { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readdistribution/nextflow.config b/tests/modules/rseqc/readdistribution/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ 
b/tests/modules/rseqc/readdistribution/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readdistribution/test.yml b/tests/modules/rseqc/readdistribution/test.yml index 79e7e1d3..e530e92a 100644 --- a/tests/modules/rseqc/readdistribution/test.yml +++ b/tests/modules/rseqc/readdistribution/test.yml @@ -1,5 +1,5 @@ - name: rseqc readdistribution test_rseqc_readdistribution - command: nextflow run tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/readdistribution -entry test_rseqc_readdistribution -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readdistribution/nextflow.config tags: - rseqc - rseqc/readdistribution diff --git a/tests/modules/rseqc/readduplication/main.nf b/tests/modules/rseqc/readduplication/main.nf index b94f6945..bcccde5d 100644 --- a/tests/modules/rseqc/readduplication/main.nf +++ b/tests/modules/rseqc/readduplication/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' addParams(options: [:]) +include { RSEQC_READDUPLICATION } from '../../../../modules/rseqc/readduplication/main.nf' workflow test_rseqc_readduplication { input = [ [ id:'test', single_end: false ], // meta map diff --git a/tests/modules/rseqc/readduplication/nextflow.config b/tests/modules/rseqc/readduplication/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/rseqc/readduplication/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/rseqc/readduplication/test.yml b/tests/modules/rseqc/readduplication/test.yml index 2a4c9546..b0c35071 100644 --- 
a/tests/modules/rseqc/readduplication/test.yml +++ b/tests/modules/rseqc/readduplication/test.yml @@ -1,5 +1,5 @@ - name: rseqc readduplication test_rseqc_readduplication - command: nextflow run tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c tests/config/nextflow.config + command: nextflow run ./tests/modules/rseqc/readduplication -entry test_rseqc_readduplication -c ./tests/config/nextflow.config -c ./tests/modules/rseqc/readduplication/nextflow.config tags: - rseqc/readduplication - rseqc diff --git a/tests/modules/salmon/index/main.nf b/tests/modules/salmon/index/main.nf index 98804733..680b4c6e 100644 --- a/tests/modules/salmon/index/main.nf +++ b/tests/modules/salmon/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [publish_dir:'salmon'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' workflow test_salmon_index { genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/salmon/index/nextflow.config b/tests/modules/salmon/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/salmon/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/salmon/index/test.yml b/tests/modules/salmon/index/test.yml index acefb044..07815e37 100644 --- a/tests/modules/salmon/index/test.yml +++ b/tests/modules/salmon/index/test.yml @@ -1,5 +1,5 @@ - name: salmon index - command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/index -entry test_salmon_index -c ./tests/config/nextflow.config -c ./tests/modules/salmon/index/nextflow.config tags: - salmon - salmon/index diff --git 
a/tests/modules/salmon/quant/main.nf b/tests/modules/salmon/quant/main.nf index ad15870c..a970f6c5 100644 --- a/tests/modules/salmon/quant/main.nf +++ b/tests/modules/salmon/quant/main.nf @@ -2,14 +2,17 @@ nextflow.enable.dsl = 2 -include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' addParams( options: [:] ) -include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' addParams( options: [args: '--minAssignedFrags 1'] ) +include { SALMON_INDEX } from '../../../../modules/salmon/index/main.nf' +include { SALMON_QUANT } from '../../../../modules/salmon/quant/main.nf' workflow test_salmon_quant_single_end { - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -21,10 +24,13 @@ workflow test_salmon_quant_single_end { workflow test_salmon_quant_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = 
file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) @@ -36,9 +42,12 @@ workflow test_salmon_quant_paired_end { workflow test_salmon_quant_single_end_lib_type_A { - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] genome_fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) transcript_fasta = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) diff --git a/tests/modules/salmon/quant/nextflow.config b/tests/modules/salmon/quant/nextflow.config new file mode 100644 index 00000000..7a8c911a --- /dev/null +++ b/tests/modules/salmon/quant/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SALMON_QUANT { + ext.args = '--minAssignedFrags 1' + } + +} diff --git a/tests/modules/salmon/quant/test.yml b/tests/modules/salmon/quant/test.yml index d7ed0d0f..514718fa 100644 --- a/tests/modules/salmon/quant/test.yml +++ b/tests/modules/salmon/quant/test.yml @@ -1,5 +1,5 @@ - name: salmon quant single-end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -23,34 +23,34 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 
2de170bdc9f6fd237d286429b292bb28 - - path: ./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant paired end - command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c tests/config/nextflow.config + 
command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon - salmon/quant @@ -74,35 +74,35 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: ./output/salmon/test/libParams/flenDist.txt md5sum: 221f754ed55dd1e34874f9b7b3f9d240 - - path: ./output/index/salmon/ref_indexing.log - - path: ./output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: ./output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: ./output/index/salmon/versionInfo.json + - path: ./output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: ./output/index/salmon/complete_ref_lens.bin + - path: ./output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/mphf.bin + - path: ./output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: ./output/index/salmon/pre_indexing.log - - path: ./output/index/salmon/ctable.bin - - path: ./output/index/salmon/duplicate_clusters.tsv + - path: ./output/salmon/salmon/pre_indexing.log + - path: ./output/salmon/salmon/ctable.bin + - path: ./output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: ./output/index/salmon/reflengths.bin + - path: ./output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: ./output/index/salmon/info.json + - path: ./output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: ./output/index/salmon/refAccumLengths.bin + - path: ./output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: ./output/index/salmon/ctg_offsets.bin + - path: ./output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: ./output/index/salmon/rank.bin + - path: ./output/salmon/salmon/rank.bin md5sum: 
3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin - name: salmon quant test_salmon_quant_single_end_lib_type_A - command: nextflow run tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c tests/config/nextflow.config + command: nextflow run ./tests/modules/salmon/quant -entry test_salmon_quant_single_end_lib_type_A -c ./tests/config/nextflow.config -c ./tests/modules/salmon/quant/nextflow.config tags: - salmon/quant - salmon @@ -126,26 +126,26 @@ md5sum: ef13c06a538e9c34ca9f84212c82f44e - path: output/salmon/test/libParams/flenDist.txt md5sum: 2de170bdc9f6fd237d286429b292bb28 - - path: ./output/index/salmon/ref_indexing.log - - path: output/index/salmon/refseq.bin + - path: ./output/salmon/salmon/ref_indexing.log + - path: output/salmon/salmon/refseq.bin md5sum: 79c4ddf34be3a98d5a7b9d153629a6f7 - - path: output/index/salmon/versionInfo.json + - path: output/salmon/salmon/versionInfo.json md5sum: 6c764bd219b7bc17168a99d232c0fe09 - - path: output/index/salmon/complete_ref_lens.bin + - path: output/salmon/salmon/complete_ref_lens.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/mphf.bin + - path: output/salmon/salmon/mphf.bin md5sum: 53669a47610e33e031faafd32703b714 - - path: output/index/salmon/duplicate_clusters.tsv + - path: output/salmon/salmon/duplicate_clusters.tsv md5sum: 51b5292e3a874119c0e1aa566e95d70c - - path: output/index/salmon/reflengths.bin + - path: output/salmon/salmon/reflengths.bin md5sum: f57562f1fca3ae7b133f895ae13c3d08 - - path: output/index/salmon/info.json + - path: output/salmon/salmon/info.json md5sum: 61ff4d3471134c280668355ddd39e99f - - path: output/index/salmon/refAccumLengths.bin + - path: output/salmon/salmon/refAccumLengths.bin md5sum: 8d1970505b2b08ca0eb5ff7722b48cde - - path: output/index/salmon/ctg_offsets.bin + - path: 
output/salmon/salmon/ctg_offsets.bin md5sum: 27a76542337df436436e66017f66dd25 - - path: output/index/salmon/rank.bin + - path: output/salmon/salmon/rank.bin md5sum: 3f34dca1ec26cdf89a6d19b1d1c07e71 - - path: ./output/index/salmon/pos.bin - - path: ./output/index/salmon/seq.bin + - path: ./output/salmon/salmon/pos.bin + - path: ./output/salmon/salmon/seq.bin diff --git a/tests/modules/samblaster/main.nf b/tests/modules/samblaster/main.nf new file mode 100644 index 00000000..5831ecfc --- /dev/null +++ b/tests/modules/samblaster/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMBLASTER } from '../../../modules/samblaster/main.nf' + +workflow test_samblaster { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_unsorted_bam'], checkIfExists: true) ] + + SAMBLASTER ( input ) +} diff --git a/tests/modules/samblaster/nextflow.config b/tests/modules/samblaster/nextflow.config new file mode 100644 index 00000000..7ba8b23b --- /dev/null +++ b/tests/modules/samblaster/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMBLASTER { + ext.args = '-M --addMateTags' + ext.prefix = { "${meta.id}.processed" } + } + +} diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml new file mode 100644 index 00000000..acc6d0f0 --- /dev/null +++ b/tests/modules/samblaster/test.yml @@ -0,0 +1,7 @@ +- name: samblaster test_samblaster + command: nextflow run ./tests/modules/samblaster -entry test_samblaster -c ./tests/config/nextflow.config -c ./tests/modules/samblaster/nextflow.config + tags: + - samblaster + files: + - path: output/samblaster/test.processed.bam + md5sum: 950f23d85f75be1cf872f45c0144bdf4 diff --git a/tests/modules/samtools/ampliconclip/main.nf b/tests/modules/samtools/ampliconclip/main.nf index a8d8609f..eae70b06 100644 
--- a/tests/modules/samtools/ampliconclip/main.nf +++ b/tests/modules/samtools/ampliconclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' addParams([:]) +include { SAMTOOLS_AMPLICONCLIP } from '../../../../modules/samtools/ampliconclip/main.nf' workflow test_samtools_ampliconclip_no_stats_no_rejects { diff --git a/tests/modules/samtools/ampliconclip/nextflow.config b/tests/modules/samtools/ampliconclip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/ampliconclip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index 9e41ce5b..e8fd456c 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -1,34 +1,32 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml samtools/ampliconclip - name: samtools ampliconclip no stats no rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_no_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 1c705ebe39f68f1dac164733ae99c9d2 + md5sum: 678f9ab04fbe3206f0f96e170fd833e9 - name: samtools ampliconclip no stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry 
test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 86c7bfb5378d57b16855c5b399000b2a + md5sum: bbf65ea626539d96c8271e17d1fc988b - path: output/samtools/test.cliprejects.bam - md5sum: 8e2eea2c0005b4d4e77c0eb549599133 + md5sum: a0bee15aead020d16d0c81bd9667df46 - name: samtools ampliconclip with stats with rejects - command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config tags: - samtools - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: d96f5eebef0ff4635e68090e89756d4a + md5sum: f5a3611ecad34ba2dde77096e1c7dd93 - path: output/samtools/test.cliprejects.bam - md5sum: ad83a523d6ff1c58caade4ddafbaaed7 + md5sum: 90ee7ce908b4bdb89ab41e4410de9012 - path: output/samtools/test.clipstats.txt - md5sum: 6fbde83d658cd2813b79900d33800d1d + md5sum: fc23355e1743d47f2541f2cb1a7a0cda diff --git a/tests/modules/samtools/bam2fq/main.nf b/tests/modules/samtools/bam2fq/main.nf index f8614ad0..928bfe08 100644 --- a/tests/modules/samtools/bam2fq/main.nf +++ b/tests/modules/samtools/bam2fq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' addParams( options: [args: "-T RX"] ) +include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' workflow test_samtools_bam2fq_nosplit { diff --git a/tests/modules/samtools/bam2fq/nextflow.config b/tests/modules/samtools/bam2fq/nextflow.config new file mode 100644 index 00000000..cf886bb2 --- /dev/null +++ b/tests/modules/samtools/bam2fq/nextflow.config @@ -0,0 
+1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + +} diff --git a/tests/modules/samtools/bam2fq/test.yml b/tests/modules/samtools/bam2fq/test.yml index ff1762b3..feb994fd 100644 --- a/tests/modules/samtools/bam2fq/test.yml +++ b/tests/modules/samtools/bam2fq/test.yml @@ -1,5 +1,5 @@ - name: samtools bam2fq test_samtools_bam2fq_nosplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools @@ -8,7 +8,7 @@ md5sum: d733e66d29a4b366bf9df8c42f845256 - name: samtools bam2fq test_samtools_bam2fq_withsplit - command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bam2fq/nextflow.config tags: - samtools/bam2fq - samtools diff --git a/tests/modules/samtools/depth/main.nf b/tests/modules/samtools/depth/main.nf index 90497534..c6d2dc0e 100644 --- a/tests/modules/samtools/depth/main.nf +++ b/tests/modules/samtools/depth/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' addParams( options: [:] ) +include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' workflow test_samtools_depth { diff --git a/tests/modules/samtools/depth/nextflow.config b/tests/modules/samtools/depth/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/depth/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/depth/test.yml b/tests/modules/samtools/depth/test.yml index 4d5007c8..978134ad 100644 --- a/tests/modules/samtools/depth/test.yml +++ b/tests/modules/samtools/depth/test.yml @@ -1,5 +1,5 @@ - name: samtools depth - command: nextflow run tests/modules/samtools/depth -entry test_samtools_depth -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/depth -entry test_samtools_depth -c ./tests/config/nextflow.config -c ./tests/modules/samtools/depth/nextflow.config tags: - samtools/depth - samtools diff --git a/tests/modules/samtools/faidx/main.nf b/tests/modules/samtools/faidx/main.nf index 0102af28..bc4dc5e3 100644 --- a/tests/modules/samtools/faidx/main.nf +++ b/tests/modules/samtools/faidx/main.nf @@ -2,10 +2,12 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FAIDX } from '../../../../modules/samtools/faidx/main.nf' workflow test_samtools_faidx { - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - SAMTOOLS_FAIDX ( fasta ) + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + SAMTOOLS_FAIDX ( input ) } diff --git a/tests/modules/samtools/faidx/nextflow.config b/tests/modules/samtools/faidx/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/faidx/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index 49a92265..dc2184ee 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -1,4 +1,4 @@ -- name: 
samtools faidx test workflow +- name: samtools faidx test_samtools_faidx command: nextflow run tests/modules/samtools/faidx -entry test_samtools_faidx -c tests/config/nextflow.config tags: - samtools @@ -6,3 +6,5 @@ files: - path: output/samtools/genome.fasta.fai md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 + - path: output/samtools/versions.yml + md5sum: d56671a7c8f8058944d3d536c3058f7f diff --git a/tests/modules/samtools/fastq/main.nf b/tests/modules/samtools/fastq/main.nf index 94ad9471..6e7e323c 100644 --- a/tests/modules/samtools/fastq/main.nf +++ b/tests/modules/samtools/fastq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FASTQ } from '../../../../modules/samtools/fastq/main.nf' workflow test_samtools_fastq { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/fastq/nextflow.config b/tests/modules/samtools/fastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/fastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/fastq/test.yml b/tests/modules/samtools/fastq/test.yml index bfcf5c92..39da9889 100644 --- a/tests/modules/samtools/fastq/test.yml +++ b/tests/modules/samtools/fastq/test.yml @@ -1,5 +1,5 @@ - name: samtools fastq test_samtools_fastq - command: nextflow run tests/modules/samtools/fastq -entry test_samtools_fastq -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fastq -entry test_samtools_fastq -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fastq/nextflow.config tags: - samtools - samtools/fastq diff --git a/tests/modules/samtools/fixmate/main.nf b/tests/modules/samtools/fixmate/main.nf index 5174beab..cb7c136d 100644 --- 
a/tests/modules/samtools/fixmate/main.nf +++ b/tests/modules/samtools/fixmate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' addParams( options: [args:'-r -c -m'] ) +include { SAMTOOLS_FIXMATE } from '../../../../modules/samtools/fixmate/main.nf' workflow test_samtools_fixmate { diff --git a/tests/modules/samtools/fixmate/nextflow.config b/tests/modules/samtools/fixmate/nextflow.config new file mode 100644 index 00000000..b9402bcf --- /dev/null +++ b/tests/modules/samtools/fixmate/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_FIXMATE { + ext.args = '-r -c -m' + } + +} diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml index c7864c04..8e87e059 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -1,8 +1,8 @@ - name: samtools fixmate test_samtools_fixmate - command: nextflow run tests/modules/samtools/fixmate -entry test_samtools_fixmate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/fixmate -entry test_samtools_fixmate -c ./tests/config/nextflow.config -c ./tests/modules/samtools/fixmate/nextflow.config tags: - - samtools/fixmate - samtools + - samtools/fixmate files: - path: output/samtools/test.bam - md5sum: 92c8463710cdcaef2010aa02ed9e01fd + md5sum: a4092657a4b17170c7702a76cbf192a1 diff --git a/tests/modules/samtools/flagstat/main.nf b/tests/modules/samtools/flagstat/main.nf index a31a7d22..a0e86422 100644 --- a/tests/modules/samtools/flagstat/main.nf +++ b/tests/modules/samtools/flagstat/main.nf @@ -2,13 +2,14 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' addParams( options: [:] ) +include { SAMTOOLS_FLAGSTAT } from '../../../../modules/samtools/flagstat/main.nf' 
workflow test_samtools_flagstat { - input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) - ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + ] SAMTOOLS_FLAGSTAT ( input ) } diff --git a/tests/modules/samtools/flagstat/nextflow.config b/tests/modules/samtools/flagstat/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/flagstat/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/flagstat/test.yml b/tests/modules/samtools/flagstat/test.yml index 0da6c2f4..a5f28b36 100644 --- a/tests/modules/samtools/flagstat/test.yml +++ b/tests/modules/samtools/flagstat/test.yml @@ -1,5 +1,5 @@ - name: samtools flagstat - command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/flagstat -entry test_samtools_flagstat -c ./tests/config/nextflow.config -c ./tests/modules/samtools/flagstat/nextflow.config tags: - samtools - samtools/flagstat diff --git a/tests/modules/samtools/idxstats/main.nf b/tests/modules/samtools/idxstats/main.nf index 9919c3e4..f3de76a0 100644 --- a/tests/modules/samtools/idxstats/main.nf +++ b/tests/modules/samtools/idxstats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_IDXSTATS } from '../../../../modules/samtools/idxstats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_IDXSTATS } from 
'../../../../modules/samtools/idxstats/main.nf' workflow test_samtools_idxstats { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/idxstats/nextflow.config b/tests/modules/samtools/idxstats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/idxstats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/idxstats/test.yml b/tests/modules/samtools/idxstats/test.yml index 6064ca56..88786eef 100644 --- a/tests/modules/samtools/idxstats/test.yml +++ b/tests/modules/samtools/idxstats/test.yml @@ -1,5 +1,5 @@ - name: samtools idxstats - command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/idxstats -entry test_samtools_idxstats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/idxstats/nextflow.config tags: - samtools - samtools/idxstats diff --git a/tests/modules/samtools/index/main.nf b/tests/modules/samtools/index/main.nf index be9014e0..3592a99a 100644 --- a/tests/modules/samtools/index/main.nf +++ b/tests/modules/samtools/index/main.nf @@ -2,8 +2,9 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' addParams( options: [:] ) -include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' addParams( options: [args:'-c'] ) +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_BAI } from '../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CRAI } from '../../../../modules/samtools/index/main.nf' +include { SAMTOOLS_INDEX as SAMTOOLS_INDEX_CSI } from '../../../../modules/samtools/index/main.nf' workflow test_samtools_index_bai { input = [ [ id:'test', single_end:false ], // meta map @@ 
-13,6 +14,14 @@ workflow test_samtools_index_bai { SAMTOOLS_INDEX_BAI ( input ) } +workflow test_samtools_index_crai { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true) + ] + + SAMTOOLS_INDEX_CRAI ( input ) +} + workflow test_samtools_index_csi { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) diff --git a/tests/modules/samtools/index/nextflow.config b/tests/modules/samtools/index/nextflow.config new file mode 100644 index 00000000..d3a4c785 --- /dev/null +++ b/tests/modules/samtools/index/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_INDEX_CSI { + ext.args = '-c' + } + +} diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 31941dd6..7184be8f 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -1,5 +1,5 @@ -- name: samtools index bai - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_bai -c tests/config/nextflow.config +- name: samtools index test_samtools_index_bai + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_bai -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index @@ -7,8 +7,17 @@ - path: output/samtools/test.paired_end.sorted.bam.bai md5sum: 704c10dd1326482448ca3073fdebc2f4 -- name: samtools index csi - command: nextflow run tests/modules/samtools/index -entry test_samtools_index_csi -c tests/config/nextflow.config +- name: samtools index test_samtools_index_crai + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_crai -c ./tests/config/nextflow.config -c 
./tests/modules/samtools/index/nextflow.config + tags: + - samtools + - samtools/index + files: + - path: output/samtools/test.paired_end.recalibrated.sorted.cram.crai + md5sum: 14bc3bd5c89cacc8f4541f9062429029 + +- name: samtools index test_samtools_index_csi + command: nextflow run ./tests/modules/samtools/index -entry test_samtools_index_csi -c ./tests/config/nextflow.config -c ./tests/modules/samtools/index/nextflow.config tags: - samtools - samtools/index diff --git a/tests/modules/samtools/merge/main.nf b/tests/modules/samtools/merge/main.nf index 07485df1..ad5c56e3 100644 --- a/tests/modules/samtools/merge/main.nf +++ b/tests/modules/samtools/merge/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' addParams( options: [suffix:'_merged'] ) +include { SAMTOOLS_MERGE } from '../../../../modules/samtools/merge/main.nf' workflow test_samtools_merge { input = [ [ id: 'test' ], // meta map diff --git a/tests/modules/samtools/merge/nextflow.config b/tests/modules/samtools/merge/nextflow.config new file mode 100644 index 00000000..4ac70fa0 --- /dev/null +++ b/tests/modules/samtools/merge/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_MERGE { + ext.prefix = { "${meta.id}_merged" } + } + +} diff --git a/tests/modules/samtools/merge/test.yml b/tests/modules/samtools/merge/test.yml index b39ca2ec..948c6191 100644 --- a/tests/modules/samtools/merge/test.yml +++ b/tests/modules/samtools/merge/test.yml @@ -1,15 +1,15 @@ - name: samtools merge test_samtools_merge - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - - samtools/merge - samtools + - 
samtools/merge files: - path: output/samtools/test_merged.bam - name: samtools merge test_samtools_merge_cram - command: nextflow run tests/modules/samtools/merge -entry test_samtools_merge_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/merge -entry test_samtools_merge_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/merge/nextflow.config tags: - - samtools/merge - samtools + - samtools/merge files: - path: output/samtools/test_merged.cram diff --git a/tests/modules/samtools/mpileup/main.nf b/tests/modules/samtools/mpileup/main.nf index b8db0275..dc58cc2c 100644 --- a/tests/modules/samtools/mpileup/main.nf +++ b/tests/modules/samtools/mpileup/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' addParams( options: [:] ) +include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' workflow test_samtools_mpileup { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/mpileup/nextflow.config b/tests/modules/samtools/mpileup/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/mpileup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 25c39d63..53a9c142 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -1,5 +1,5 @@ - name: samtools mpileup - command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/samtools/mpileup/nextflow.config tags: - samtools - samtools/mpileup diff --git 
a/tests/modules/samtools/sort/main.nf b/tests/modules/samtools/sort/main.nf index b76cdb1a..9853b355 100644 --- a/tests/modules/samtools/sort/main.nf +++ b/tests/modules/samtools/sort/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' addParams( options: ['suffix': '.sorted'] ) +include { SAMTOOLS_SORT } from '../../../../modules/samtools/sort/main.nf' workflow test_samtools_sort { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/sort/nextflow.config b/tests/modules/samtools/sort/nextflow.config new file mode 100644 index 00000000..230bec5f --- /dev/null +++ b/tests/modules/samtools/sort/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_SORT { + ext.prefix = { "${meta.id}.sorted" } + } + +} diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 12e6669f..dfd2eb69 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -1,8 +1,8 @@ - name: samtools sort - command: nextflow run tests/modules/samtools/sort -entry test_samtools_sort -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/sort -entry test_samtools_sort -c ./tests/config/nextflow.config -c ./tests/modules/samtools/sort/nextflow.config tags: - samtools - samtools/sort files: - path: output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d diff --git a/tests/modules/samtools/stats/main.nf b/tests/modules/samtools/stats/main.nf index 8e8b0c88..d83cbf4a 100644 --- a/tests/modules/samtools/stats/main.nf +++ b/tests/modules/samtools/stats/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_STATS } from '../../../../modules/samtools/stats/main.nf' addParams( options: [:] ) +include { SAMTOOLS_STATS } 
from '../../../../modules/samtools/stats/main.nf' workflow test_samtools_stats { input = [ [ id:'test', single_end:false ], // meta map @@ -14,9 +14,9 @@ workflow test_samtools_stats { } workflow test_samtools_stats_cram { - input = [ [ id: 'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + input = [ [ id: 'test', single_end:true ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/stats/nextflow.config b/tests/modules/samtools/stats/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index a194c666..178eba72 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -1,17 +1,17 @@ - name: samtools stats test_samtools_stats - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - - samtools - samtools/stats + - samtools files: - path: output/samtools/test.paired_end.sorted.bam.stats - md5sum: a7f36cf11fd3bf97e0a0ae29c0627296 + md5sum: 
09146eeecfcae2a84fb8615c86cd8d64 - name: samtools stats test_samtools_stats_cram - command: nextflow run tests/modules/samtools/stats -entry test_samtools_stats_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config tags: - - samtools - samtools/stats + - samtools files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats - md5sum: bd55a1da30028403f4b66dacf7a2a20e + md5sum: 62377b29c3f6253e37308a28d13a496d diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index bd270cd8..8ee27ef8 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' addParams( options: [:] ) +include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' workflow test_samtools_view { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/samtools/view/nextflow.config b/tests/modules/samtools/view/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/view/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/view/test.yml b/tests/modules/samtools/view/test.yml index ceaa0e89..1287d455 100644 --- a/tests/modules/samtools/view/test.yml +++ b/tests/modules/samtools/view/test.yml @@ -1,5 +1,5 @@ - name: samtools view test_samtools_view - command: nextflow run tests/modules/samtools/view -entry test_samtools_view -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - 
samtools @@ -8,7 +8,7 @@ md5sum: 8fb1e82f76416e9e30fc6b2357e2cf13 - name: samtools view test_samtools_view_cram - command: nextflow run tests/modules/samtools/view -entry test_samtools_view_cram -c tests/config/nextflow.config + command: nextflow run ./tests/modules/samtools/view -entry test_samtools_view_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config tags: - samtools/view - samtools diff --git a/tests/modules/scoary/main.nf b/tests/modules/scoary/main.nf new file mode 100644 index 00000000..5f080b7d --- /dev/null +++ b/tests/modules/scoary/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SCOARY } from '../../../modules/scoary/main.nf' + +workflow test_scoary { + + input = [ [ id:'test', single_end:false ], // meta map + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Gene_presence_absence.csv", checkIfExists: true), + file("https://github.com/AdmiralenOla/Scoary/raw/master/scoary/exampledata/Tetracycline_resistance.csv", checkIfExists: true) ] + + tree = [] + SCOARY ( input, tree) +} diff --git a/tests/modules/scoary/nextflow.config b/tests/modules/scoary/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/scoary/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/scoary/test.yml b/tests/modules/scoary/test.yml new file mode 100644 index 00000000..71344093 --- /dev/null +++ b/tests/modules/scoary/test.yml @@ -0,0 +1,9 @@ +- name: scoary test_scoary + command: nextflow run ./tests/modules/scoary -entry test_scoary -c ./tests/config/nextflow.config -c ./tests/modules/scoary/nextflow.config + tags: + - scoary + files: + - path: output/scoary/Bogus_trait.results.csv + md5sum: 9550c692bbe6ff0ac844357bfabb809b + - path: output/scoary/Tetracycline_resistance.results.csv + md5sum: 
a87740818ab4de69a758fc75d7b879dd diff --git a/tests/modules/seacr/callpeak/main.nf b/tests/modules/seacr/callpeak/main.nf index a1aeb76e..230d3a4c 100644 --- a/tests/modules/seacr/callpeak/main.nf +++ b/tests/modules/seacr/callpeak/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' addParams( options: [ args:'norm stringent' ] ) +include { SEACR_CALLPEAK } from '../../../../modules/seacr/callpeak/main.nf' workflow test_seacr_callpeak { input = [ [ id:'test_1'], diff --git a/tests/modules/seacr/callpeak/nextflow.config b/tests/modules/seacr/callpeak/nextflow.config new file mode 100644 index 00000000..54c19e6b --- /dev/null +++ b/tests/modules/seacr/callpeak/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEACR_CALLPEAK { + ext.args = 'norm stringent' + } + +} diff --git a/tests/modules/seacr/callpeak/test.yml b/tests/modules/seacr/callpeak/test.yml index 2cf75b06..63104bd0 100644 --- a/tests/modules/seacr/callpeak/test.yml +++ b/tests/modules/seacr/callpeak/test.yml @@ -1,5 +1,5 @@ - name: seacr callpeak - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak @@ -8,7 +8,7 @@ md5sum: a3cb0c7c4ffa895788da3f0d6371b7df - name: seacr callpeak threshold - command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seacr/callpeak -entry test_seacr_callpeak_threshold -c ./tests/config/nextflow.config -c ./tests/modules/seacr/callpeak/nextflow.config tags: - seacr - seacr/callpeak diff --git a/tests/modules/seqkit/split2/main.nf 
b/tests/modules/seqkit/split2/main.nf index 21626cac..acb9d41b 100644 --- a/tests/modules/seqkit/split2/main.nf +++ b/tests/modules/seqkit/split2/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-length 8K'] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-size 50' ] ) -include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' addParams( options: ['args': '--by-part 3'] ) +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_LENGTH } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_SIZE } from '../../../../modules/seqkit/split2/main.nf' +include { SEQKIT_SPLIT2 as SEQKIT_SPLIT2_PART } from '../../../../modules/seqkit/split2/main.nf' workflow test_seqkit_split2_single_end_length { input = [ [ id:'test', single_end:true ], // meta map diff --git a/tests/modules/seqkit/split2/nextflow.config b/tests/modules/seqkit/split2/nextflow.config new file mode 100644 index 00000000..e4f64931 --- /dev/null +++ b/tests/modules/seqkit/split2/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQKIT_SPLIT2_LENGTH { + ext.args = '--by-length 8K' + } + + withName: SEQKIT_SPLIT2_SIZE { + ext.args = '--by-size 50' + } + + withName: SEQKIT_SPLIT2_PART { + ext.args = '--by-part 3' + } + +} diff --git a/tests/modules/seqkit/split2/test.yml b/tests/modules/seqkit/split2/test.yml index 13f3b003..00368e22 100644 --- a/tests/modules/seqkit/split2/test.yml +++ b/tests/modules/seqkit/split2/test.yml @@ -1,83 +1,95 @@ -- name: seqkit split2 single-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c tests/config/nextflow.config +- name: seqkit 
split2 test_seqkit_split2_single_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 + - path: output/seqkit/versions.yml + md5sum: 2d5a709d129be364687cc0b561efa532 -- name: seqkit split2 single-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 + - path: output/seqkit/versions.yml + md5sum: 490d00accd1092a8eca4e83ed809bad3 -- name: seqkit split2 single-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config +- name: seqkit split2 test_seqkit_split2_single_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_single_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: 
output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee + - path: output/seqkit/versions.yml + md5sum: 90431cd3d28954f656988230d4481115 -- name: seqkit split2 paired-end length - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_length + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_length -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: 6f7d58ba35c254c0817fe9a7c69862e4 + md5sum: 7f489b2374c5fcc155a60ce2365a7bb7 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: cf38c51506e45380fe25abdd1bd5ccc6 + md5sum: 45cccacb4676bca33beb17064322a781 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 6b094b1ba7c439fe44c1bb5e99a02ba4 + md5sum: 160b5fd363ff7cad8af9d914269d6426 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 927097c6ac7522199a9e016333181a8e + md5sum: 18bc5434cf55706394cccb44e6108561 + - path: output/seqkit/versions.yml + md5sum: 9272afc1a126ae997a712edeef317f22 -- name: seqkit split2 paired-end size - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_size + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_size -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: bf835e685d597fc1ab5e5ac7dd689619 + md5sum: b09324606fb3636b51448d6a007d2c71 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 703d95ff4fbb5b7fb4da8a164ba9aa54 + md5sum: f7873475d463e3b4d21dccbf8e859270 - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: 09d0dd83b5b1b9b95d316eeed79ea5ba 
+ md5sum: c0602b62aae860dd284c0eb0062c24dd - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8796c3f327b1094244bfcdb36d536526 + md5sum: 5bc7a98b618100b29910eb41c4c9ac0d + - path: output/seqkit/versions.yml + md5sum: af66912ae8abc493f77f70e3bf473144 -- name: seqkit split2 paired-end part - command: nextflow run ./tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config +- name: seqkit split2 test_seqkit_split2_paired_end_part + command: nextflow run tests/modules/seqkit/split2 -entry test_seqkit_split2_paired_end_part -c tests/config/nextflow.config tags: - seqkit - seqkit/split2 files: - path: output/seqkit/test/test_1.part_001.fastq.gz - md5sum: fa25951435471238d5567fd2cae31f55 + md5sum: a9d29d08e27246b6d36e21e5def405e3 - path: output/seqkit/test/test_1.part_002.fastq.gz - md5sum: 1dcf631aaaa5e7e0bd6c9668fbc6e04a + md5sum: 6d547a959adcd027dd1a8734e195dd7d - path: output/seqkit/test/test_1.part_003.fastq.gz - md5sum: 8bc86ba83a611c54f592f4eae19b680f + md5sum: 6d63cc8400dd2a96d808514fb18278ee - path: output/seqkit/test/test_2.part_001.fastq.gz - md5sum: f0055c99cd193fd97466b3cde9dd1b8f + md5sum: b51a1bed106e4ec0c9be7d9e224d0616 - path: output/seqkit/test/test_2.part_002.fastq.gz - md5sum: 8a90df768201785f7a7cd5dbb41e846a + md5sum: 079078a7f86114ae29cda8c00d5a7fc9 - path: output/seqkit/test/test_2.part_003.fastq.gz - md5sum: 890b90083e8e1606bd13ba34149cedd7 + md5sum: 6987941bf8c4a37565e333029ba41ca0 + - path: output/seqkit/versions.yml + md5sum: 193bc5f0c429076f816ab0a529c4c1fc diff --git a/tests/modules/seqsero2/main.nf b/tests/modules/seqsero2/main.nf index 04ee8e27..9587bf9f 100644 --- a/tests/modules/seqsero2/main.nf +++ b/tests/modules/seqsero2/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' addParams( options: [args: '-m k -t 4'] ) +include { SEQSERO2 } from '../../../modules/seqsero2/main.nf' workflow test_seqsero2 { diff --git 
a/tests/modules/seqsero2/nextflow.config b/tests/modules/seqsero2/nextflow.config new file mode 100644 index 00000000..b46fa7e2 --- /dev/null +++ b/tests/modules/seqsero2/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQSERO2 { + ext.args = '-m k -t 4' + } + +} diff --git a/tests/modules/seqsero2/test.yml b/tests/modules/seqsero2/test.yml index 2aa49686..e2dec062 100644 --- a/tests/modules/seqsero2/test.yml +++ b/tests/modules/seqsero2/test.yml @@ -1,5 +1,5 @@ - name: seqsero2 test_seqsero2 - command: nextflow run tests/modules/seqsero2 -entry test_seqsero2 -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqsero2 -entry test_seqsero2 -c ./tests/config/nextflow.config -c ./tests/modules/seqsero2/nextflow.config tags: - seqsero2 files: diff --git a/tests/modules/seqtk/mergepe/main.nf b/tests/modules/seqtk/mergepe/main.nf index 13654dc6..b8e12213 100644 --- a/tests/modules/seqtk/mergepe/main.nf +++ b/tests/modules/seqtk/mergepe/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [ 'suffix':'.processed' ] ) +include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/mergepe/nextflow.config b/tests/modules/seqtk/mergepe/nextflow.config new file mode 100644 index 00000000..04eeef72 --- /dev/null +++ b/tests/modules/seqtk/mergepe/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_MERGEPE { + ext.prefix = { "${meta.id}.processed" } + } + +} diff --git a/tests/modules/seqtk/mergepe/test.yml b/tests/modules/seqtk/mergepe/test.yml index 8ae95354..2a6d4d33 100644 --- a/tests/modules/seqtk/mergepe/test.yml +++ b/tests/modules/seqtk/mergepe/test.yml @@ -1,5 
+1,5 @@ - name: seqtk mergepe test_seqtk_mergepe_single_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk @@ -8,7 +8,7 @@ md5sum: e325ef7deb4023447a1f074e285761af - name: seqtk mergepe test_seqtk_mergepe_paired_end - command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/mergepe/nextflow.config tags: - seqtk/mergepe - seqtk diff --git a/tests/modules/seqtk/sample/main.nf b/tests/modules/seqtk/sample/main.nf index 4508db84..6899ef62 100644 --- a/tests/modules/seqtk/sample/main.nf +++ b/tests/modules/seqtk/sample/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' addParams( options: [ 'args': '-s100', 'suffix':'.sampled' ] ) +include { SEQTK_SAMPLE } from '../../../../modules/seqtk/sample/main.nf' // // Test with single-end data diff --git a/tests/modules/seqtk/sample/nextflow.config b/tests/modules/seqtk/sample/nextflow.config new file mode 100644 index 00000000..a79ad290 --- /dev/null +++ b/tests/modules/seqtk/sample/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SAMPLE { + ext.args = '-s100' + ext.prefix = { "${meta.id}.sampled" } + } + +} diff --git a/tests/modules/seqtk/sample/test.yml b/tests/modules/seqtk/sample/test.yml index d4cf2ca9..df24b3a4 100644 --- a/tests/modules/seqtk/sample/test.yml +++ b/tests/modules/seqtk/sample/test.yml @@ -1,5 +1,5 @@ - name: seqtk sample 
test_seqtk_sample_single_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_single_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample @@ -8,7 +8,7 @@ md5sum: 73c3e8f113860244f3ed3866a8b9d555 - name: seqtk sample test_seqtk_sample_paired_end - command: nextflow run tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/sample -entry test_seqtk_sample_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/sample/nextflow.config tags: - seqtk - seqtk/sample diff --git a/tests/modules/seqtk/subseq/main.nf b/tests/modules/seqtk/subseq/main.nf index 7c5dc7b2..608b7c2f 100644 --- a/tests/modules/seqtk/subseq/main.nf +++ b/tests/modules/seqtk/subseq/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQTK_SUBSEQ } from '../../../../modules/seqtk/subseq/main.nf' addParams( options: ['suffix':'.filtered'] ) +include { SEQTK_SUBSEQ } from '../../../../modules/seqtk/subseq/main.nf' workflow test_seqtk_subseq { diff --git a/tests/modules/seqtk/subseq/nextflow.config b/tests/modules/seqtk/subseq/nextflow.config new file mode 100644 index 00000000..8a8b9b45 --- /dev/null +++ b/tests/modules/seqtk/subseq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQTK_SUBSEQ { + ext.prefix = { ".filtered" } + } + +} diff --git a/tests/modules/seqtk/subseq/test.yml b/tests/modules/seqtk/subseq/test.yml index fca64804..4003e3ab 100644 --- a/tests/modules/seqtk/subseq/test.yml +++ b/tests/modules/seqtk/subseq/test.yml @@ -1,5 +1,5 @@ - name: seqtk subseq test_seqtk_subseq - command: nextflow run tests/modules/seqtk/subseq -entry test_seqtk_subseq -c 
tests/config/nextflow.config + command: nextflow run ./tests/modules/seqtk/subseq -entry test_seqtk_subseq -c ./tests/config/nextflow.config -c ./tests/modules/seqtk/subseq/nextflow.config tags: - seqtk - seqtk/subseq diff --git a/tests/modules/sequenzautils/bam2seqz/main.nf b/tests/modules/sequenzautils/bam2seqz/main.nf index ae478b88..fcd4c7c7 100755 --- a/tests/modules/sequenzautils/bam2seqz/main.nf +++ b/tests/modules/sequenzautils/bam2seqz/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' addParams( options: [:] ) +include { SEQUENZAUTILS_BAM2SEQZ } from '../../../../modules/sequenzautils/bam2seqz/main.nf' workflow test_sequenzautils_bam2seqz { diff --git a/tests/modules/sequenzautils/bam2seqz/nextflow.config b/tests/modules/sequenzautils/bam2seqz/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sequenzautils/bam2seqz/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sequenzautils/bam2seqz/test.yml b/tests/modules/sequenzautils/bam2seqz/test.yml index 0b9cac53..f3ea6cf0 100644 --- a/tests/modules/sequenzautils/bam2seqz/test.yml +++ b/tests/modules/sequenzautils/bam2seqz/test.yml @@ -1,5 +1,5 @@ - name: sequenzautils bam2seqz - command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/bam2seqz -entry test_sequenzautils_bam2seqz -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/bam2seqz/nextflow.config tags: - sequenzautils - sequenzautils/bam2seqz diff --git a/tests/modules/sequenzautils/gcwiggle/main.nf b/tests/modules/sequenzautils/gcwiggle/main.nf index e314f1e0..b25e037e 100644 --- a/tests/modules/sequenzautils/gcwiggle/main.nf +++ 
b/tests/modules/sequenzautils/gcwiggle/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' addParams( options: [ 'args': '-w 50' ] ) +include { SEQUENZAUTILS_GCWIGGLE } from '../../../../modules/sequenzautils/gcwiggle/main.nf' workflow test_sequenzautils_gcwiggle { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/sequenzautils/gcwiggle/nextflow.config b/tests/modules/sequenzautils/gcwiggle/nextflow.config new file mode 100644 index 00000000..62e68935 --- /dev/null +++ b/tests/modules/sequenzautils/gcwiggle/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SEQUENZAUTILS_GCWIGGLE { + ext.args = '-w 50' + } + +} diff --git a/tests/modules/sequenzautils/gcwiggle/test.yml b/tests/modules/sequenzautils/gcwiggle/test.yml index aa7a3167..21ddc4ab 100644 --- a/tests/modules/sequenzautils/gcwiggle/test.yml +++ b/tests/modules/sequenzautils/gcwiggle/test.yml @@ -1,7 +1,5 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml sequenzautils/gcwiggle - name: sequenzautils gcwiggle - command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sequenzautils/gcwiggle -entry test_sequenzautils_gcwiggle -c ./tests/config/nextflow.config -c ./tests/modules/sequenzautils/gcwiggle/nextflow.config tags: - sequenzautils - sequenzautils/gcwiggle diff --git a/tests/modules/seqwish/induce/main.nf b/tests/modules/seqwish/induce/main.nf index 356ca705..6388fea2 100644 --- a/tests/modules/seqwish/induce/main.nf +++ b/tests/modules/seqwish/induce/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SEQWISH_INDUCE } from '../../../../modules/seqwish/induce/main.nf' addParams( options: [:] ) +include { SEQWISH_INDUCE } 
from '../../../../modules/seqwish/induce/main.nf' workflow test_seqwish_induce { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/seqwish/induce/nextflow.config b/tests/modules/seqwish/induce/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/seqwish/induce/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/seqwish/induce/test.yml b/tests/modules/seqwish/induce/test.yml index d27de3c4..d5a8a7cd 100644 --- a/tests/modules/seqwish/induce/test.yml +++ b/tests/modules/seqwish/induce/test.yml @@ -1,5 +1,5 @@ - name: seqwish induce - command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c tests/config/nextflow.config + command: nextflow run ./tests/modules/seqwish/induce -entry test_seqwish_induce -c ./tests/config/nextflow.config -c ./tests/modules/seqwish/induce/nextflow.config tags: - seqwish - seqwish/induce diff --git a/tests/modules/shovill/main.nf b/tests/modules/shovill/main.nf index acc65169..2416022f 100644 --- a/tests/modules/shovill/main.nf +++ b/tests/modules/shovill/main.nf @@ -2,10 +2,10 @@ nextflow.enable.dsl = 2 -include { SHOVILL } from '../../../modules/shovill/main.nf' addParams( options: [args: '--gsize 2800000 --kmers 31'] ) -include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler skesa --gsize 2800000'] ) -include { SHOVILL as SHOVILL_MEGAHIT } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler megahit --gsize 2800000'] ) -include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' addParams( options: [args: '--assembler velvet --gsize 2800000'] ) +include { SHOVILL } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_SKESA } from '../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_MEGAHIT } from 
'../../../modules/shovill/main.nf' +include { SHOVILL as SHOVILL_VELVET } from '../../../modules/shovill/main.nf' workflow test_shovill { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/shovill/nextflow.config b/tests/modules/shovill/nextflow.config new file mode 100644 index 00000000..0599f80b --- /dev/null +++ b/tests/modules/shovill/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SHOVILL { + ext.args = '--gsize 2800000 --kmers 31' + } + + withName: SHOVILL_SKESA { + ext.args = '--assembler skesa --gsize 2800000' + } + + withName: SHOVILL_MEGAHIT { + ext.args = '--assembler megahit --gsize 2800000' + } + + withName: SHOVILL_VELVET { + ext.args = '--assembler velvet --gsize 2800000' + } + +} diff --git a/tests/modules/shovill/test.yml b/tests/modules/shovill/test.yml index a716bc66..6fdd2f3f 100644 --- a/tests/modules/shovill/test.yml +++ b/tests/modules/shovill/test.yml @@ -1,5 +1,5 @@ - name: shovill with spades - command: nextflow run ./tests/modules/shovill -entry test_shovill -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -13,7 +13,7 @@ - path: output/shovill/shovill.log - name: shovill with megahit - command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_megahit -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -26,7 +26,7 @@ - path: output/shovill/shovill.log - name: shovill with skesa - command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_skesa -c 
./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: @@ -39,7 +39,7 @@ - path: output/shovill/shovill.log - name: shovill with velvet - command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c tests/config/nextflow.config + command: nextflow run ./tests/modules/shovill -entry test_shovill_velvet -c ./tests/config/nextflow.config -c ./tests/modules/shovill/nextflow.config tags: - shovill files: diff --git a/tests/modules/snpdists/main.nf b/tests/modules/snpdists/main.nf index 8a29effa..be6d745c 100644 --- a/tests/modules/snpdists/main.nf +++ b/tests/modules/snpdists/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPDISTS } from '../../../modules/snpdists/main.nf' addParams( options: [:] ) +include { SNPDISTS } from '../../../modules/snpdists/main.nf' workflow test_snpdists { diff --git a/tests/modules/snpdists/nextflow.config b/tests/modules/snpdists/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpdists/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpdists/test.yml b/tests/modules/snpdists/test.yml index d140ce6e..c23945ce 100644 --- a/tests/modules/snpdists/test.yml +++ b/tests/modules/snpdists/test.yml @@ -1,5 +1,5 @@ - name: snpdists - command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpdists -entry test_snpdists -c ./tests/config/nextflow.config -c ./tests/modules/snpdists/nextflow.config tags: - snpdists files: diff --git a/tests/modules/snpeff/main.nf b/tests/modules/snpeff/main.nf index 923f98f4..4e8a982d 100644 --- a/tests/modules/snpeff/main.nf +++ b/tests/modules/snpeff/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SNPEFF } from '../../../modules/snpeff/main.nf' addParams( snpeff_tag: 
'5.0.WBcel235', use_cache: false ) +include { SNPEFF } from '../../../modules/snpeff/main.nf' workflow test_snpeff { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] - ] + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + SNPEFF ( input, "WBcel235.99", [] ) } diff --git a/tests/modules/snpeff/nextflow.config b/tests/modules/snpeff/nextflow.config new file mode 100644 index 00000000..f4042ab9 --- /dev/null +++ b/tests/modules/snpeff/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + container = 'nfcore/snpeff:5.0.WBcel235' + } + +} diff --git a/tests/modules/snpeff/test.yml b/tests/modules/snpeff/test.yml index 44eba200..8f4d980f 100644 --- a/tests/modules/snpeff/test.yml +++ b/tests/modules/snpeff/test.yml @@ -1,5 +1,5 @@ - name: snpeff test_snpeff - command: nextflow run tests/modules/snpeff -entry test_snpeff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpeff -entry test_snpeff -c ./tests/config/nextflow.config -c ./tests/modules/snpeff/nextflow.config tags: - snpeff files: diff --git a/tests/modules/snpsites/main.nf b/tests/modules/snpsites/main.nf index df2a6852..f7801673 100644 --- a/tests/modules/snpsites/main.nf +++ b/tests/modules/snpsites/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SNPSITES } from '../../../modules/snpsites/main.nf' addParams( options: [:] ) +include { SNPSITES } from '../../../modules/snpsites/main.nf' workflow test_snpsites { diff --git a/tests/modules/snpsites/nextflow.config b/tests/modules/snpsites/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/snpsites/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/snpsites/test.yml b/tests/modules/snpsites/test.yml index 8361cd05..d9c19cd5 100644 --- a/tests/modules/snpsites/test.yml +++ b/tests/modules/snpsites/test.yml @@ -1,5 +1,5 @@ - name: snpsites - command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c tests/config/nextflow.config + command: nextflow run ./tests/modules/snpsites -entry test_snpsites -c ./tests/config/nextflow.config -c ./tests/modules/snpsites/nextflow.config tags: - snpsites files: diff --git a/tests/modules/spades/main.nf b/tests/modules/spades/main.nf index a8518a0e..3710eeb7 100644 --- a/tests/modules/spades/main.nf +++ b/tests/modules/spades/main.nf @@ -2,11 +2,13 @@ nextflow.enable.dsl = 2 -include { SPADES } from '../../../modules/spades/main.nf' addParams( spades_hmm: false ,options: ['args': '--rnaviral'] ) +include { SPADES } from '../../../modules/spades/main.nf' workflow test_spades_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] ] SPADES ( input, [] ) } @@ -14,7 +16,32 @@ workflow test_spades_single_end { workflow test_spades_paired_end { input = [ [ id:'test', single_end:false ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [] + ] + + SPADES ( input, [] ) +} + +workflow test_spades_illumina_nanopore { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [], + [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + ] + + SPADES ( input, [] ) +} + +// that isnt perfect, because CCS reads should rather be used with -s instead of --pacbio +workflow test_spades_illumina_pacbio { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['homo_sapiens']['pacbio']['ccs_fq_gz'], checkIfExists: true) ], + [] ] SPADES ( input, [] ) diff --git a/tests/modules/spades/nextflow.config b/tests/modules/spades/nextflow.config new file mode 100644 index 00000000..5fabafae --- /dev/null +++ b/tests/modules/spades/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPADES { + ext.args = '--rnaviral' + } + +} diff --git a/tests/modules/spades/test.yml b/tests/modules/spades/test.yml index 35beb1a7..98bc9c8c 100644 --- a/tests/modules/spades/test.yml +++ b/tests/modules/spades/test.yml @@ -1,23 +1,52 @@ -- name: spades single end - command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c tests/config/nextflow.config +- name: spades test_spades_single_end + command: nextflow run ./tests/modules/spades -entry test_spades_single_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: a995d1d413031534180d2b3b715fa921 - - path: output/spades/test.contigs.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 - - path: output/spades/test.scaffolds.fa - md5sum: 65ba6a517c152dbe219bf4b5b92bdad7 + - path: output/spades/test.assembly.gfa.gz + md5sum: e5eab229363a906954a07df00e2495a6 + - path: 
output/spades/test.contigs.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 + - path: output/spades/test.scaffolds.fa.gz + md5sum: 64f6b339872b934138c6efd6baa445f4 - path: output/spades/test.spades.log -- name: spades paired end - command: nextflow run ./tests/modules/spades -entry test_spades_paired_end -c tests/config/nextflow.config +- name: spades test_spades_paired_end + command: nextflow run ./tests/modules/spades -entry test_spades_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config tags: - spades files: - - path: output/spades/test.assembly.gfa - md5sum: bb053ef4e9250829c980ca17fbdbe3e9 - - path: output/spades/test.contigs.fa - md5sum: 4476d409da70d9f7fc2aa8f25bbaf7fd + - path: output/spades/test.assembly.gfa.gz + md5sum: c8614fb69907ae832a1359a054af240f + - path: output/spades/test.contigs.fa.gz + md5sum: eab5165b3cda96c235aaa1388010cb27 - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_nanopore + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e438534f14e107f005efdd659adeba6a + - path: output/spades/test.contigs.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.scaffolds.fa.gz + md5sum: 027b0e54bfd8f4bc359e751e094133ef + - path: output/spades/test.spades.log + - path: output/spades/warnings.log + +- name: spades test_spades_illumina_pacbio + command: nextflow run ./tests/modules/spades -entry test_spades_illumina_pacbio -c ./tests/config/nextflow.config -c ./tests/modules/spades/nextflow.config + tags: + - spades + files: + - path: output/spades/test.assembly.gfa.gz + md5sum: e12aaf83d8dbfc313339b7636ba43447 + - path: output/spades/test.contigs.fa.gz + md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.scaffolds.fa.gz + 
md5sum: 78523f66d34ac4d5a4890f353c1a6ec6 + - path: output/spades/test.spades.log + - path: output/spades/warnings.log diff --git a/tests/modules/spatyper/main.nf b/tests/modules/spatyper/main.nf index 65729cc0..655845c7 100644 --- a/tests/modules/spatyper/main.nf +++ b/tests/modules/spatyper/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { SPATYPER } from '../../../modules/spatyper/main.nf' addParams( options: [:] ) -include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' addParams( options: [args: '--do_enrich'] ) +include { SPATYPER } from '../../../modules/spatyper/main.nf' +include { SPATYPER as SPATYPER_ENRICH } from '../../../modules/spatyper/main.nf' workflow test_spatyper { input = [ [ id:'test' ], diff --git a/tests/modules/spatyper/nextflow.config b/tests/modules/spatyper/nextflow.config new file mode 100644 index 00000000..ac90a452 --- /dev/null +++ b/tests/modules/spatyper/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SPATYPER_ENRICH { + ext.args = '--do_enrich' + } + +} diff --git a/tests/modules/spatyper/test.yml b/tests/modules/spatyper/test.yml index 49516812..6e1f8144 100644 --- a/tests/modules/spatyper/test.yml +++ b/tests/modules/spatyper/test.yml @@ -1,5 +1,5 @@ - name: spatyper test_spatyper - command: nextflow run tests/modules/spatyper -entry test_spatyper -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper -c ./tests/config/nextflow.config -c ./tests/modules/spatyper/nextflow.config tags: - spatyper files: @@ -7,7 +7,7 @@ md5sum: a698352823875171696e5e7ed7015c13 - name: spatyper test_spatyper_enrich - command: nextflow run tests/modules/spatyper -entry test_spatyper_enrich -c tests/config/nextflow.config + command: nextflow run ./tests/modules/spatyper -entry test_spatyper_enrich -c ./tests/config/nextflow.config -c 
./tests/modules/spatyper/nextflow.config tags: - spatyper files: diff --git a/tests/modules/sratools/fasterqdump/main.nf b/tests/modules/sratools/fasterqdump/main.nf index 1a0e0c7a..2f838fd2 100644 --- a/tests/modules/sratools/fasterqdump/main.nf +++ b/tests/modules/sratools/fasterqdump/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { UNTAR } from '../../../../modules/untar/main.nf' -include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' addParams( options: [:] ) +include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/main.nf' workflow test_sratools_fasterqdump_single_end { diff --git a/tests/modules/sratools/fasterqdump/nextflow.config b/tests/modules/sratools/fasterqdump/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/fasterqdump/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sratools/fasterqdump/test.yml b/tests/modules/sratools/fasterqdump/test.yml index 94da4ed8..64cf2404 100644 --- a/tests/modules/sratools/fasterqdump/test.yml +++ b/tests/modules/sratools/fasterqdump/test.yml @@ -1,5 +1,5 @@ - name: sratools fasterqdump test_sratools_fasterqdump_single_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_single_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - sratools - sratools/fasterqdump @@ -10,7 +10,7 @@ md5sum: 466d05dafb2eec672150754168010b4d - name: sratools fasterqdump test_sratools_fasterqdump_paired_end - command: nextflow run tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c tests/config/nextflow.config + command: nextflow run 
./tests/modules/sratools/fasterqdump -entry test_sratools_fasterqdump_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/sratools/fasterqdump/nextflow.config tags: - sratools - sratools/fasterqdump diff --git a/tests/modules/sratools/prefetch/main.nf b/tests/modules/sratools/prefetch/main.nf index 99439a7f..aa6252a1 100644 --- a/tests/modules/sratools/prefetch/main.nf +++ b/tests/modules/sratools/prefetch/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' addParams( options: [:] ) +include { SRATOOLS_PREFETCH } from '../../../../modules/sratools/prefetch/main.nf' workflow test_sratools_prefetch { diff --git a/tests/modules/sratools/prefetch/nextflow.config b/tests/modules/sratools/prefetch/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/sratools/prefetch/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/sratools/prefetch/test.yml b/tests/modules/sratools/prefetch/test.yml index c23db12a..a2efef77 100644 --- a/tests/modules/sratools/prefetch/test.yml +++ b/tests/modules/sratools/prefetch/test.yml @@ -1,5 +1,5 @@ - name: sratools prefetch test_sratools_prefetch - command: nextflow run tests/modules/sratools/prefetch -entry test_sratools_prefetch -c tests/config/nextflow.config + command: nextflow run ./tests/modules/sratools/prefetch -entry test_sratools_prefetch -c ./tests/config/nextflow.config -c ./tests/modules/sratools/prefetch/nextflow.config tags: - sratools/prefetch - sratools diff --git a/tests/modules/staphopiasccmec/main.nf b/tests/modules/staphopiasccmec/main.nf index ec1b48e4..8ea310ce 100644 --- a/tests/modules/staphopiasccmec/main.nf +++ b/tests/modules/staphopiasccmec/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STAPHOPIASCCMEC } from 
'../../../modules/staphopiasccmec/main.nf' addParams( options: [:] ) -include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' addParams( options: [args: '--hamming'] ) +include { STAPHOPIASCCMEC } from '../../../modules/staphopiasccmec/main.nf' +include { STAPHOPIASCCMEC as STAPHOPIASCCMEC_HAMMING } from '../../../modules/staphopiasccmec/main.nf' workflow test_staphopiasccmec { diff --git a/tests/modules/staphopiasccmec/nextflow.config b/tests/modules/staphopiasccmec/nextflow.config new file mode 100644 index 00000000..7ee97c2f --- /dev/null +++ b/tests/modules/staphopiasccmec/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAPHOPIASCCMEC_HAMMING { + ext.args = '--hamming' + } + +} diff --git a/tests/modules/staphopiasccmec/test.yml b/tests/modules/staphopiasccmec/test.yml index aadfec3e..ac3f66da 100644 --- a/tests/modules/staphopiasccmec/test.yml +++ b/tests/modules/staphopiasccmec/test.yml @@ -1,5 +1,5 @@ - name: staphopiasccmec test_staphopiasccmec - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: @@ -7,7 +7,7 @@ md5sum: e6460d4164f3af5b290c5ccdb11343bf - name: staphopiasccmec test_staphopiasccmec_hamming - command: nextflow run tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c tests/config/nextflow.config + command: nextflow run ./tests/modules/staphopiasccmec -entry test_staphopiasccmec_hamming -c ./tests/config/nextflow.config -c ./tests/modules/staphopiasccmec/nextflow.config tags: - staphopiasccmec files: diff --git a/tests/modules/star/align/main.nf b/tests/modules/star/align/main.nf index d7a7ef96..bf305d54 100644 --- 
a/tests/modules/star/align/main.nf +++ b/tests/modules/star/align/main.nf @@ -2,51 +2,77 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [args: '--genomeSAindexNbases 9']) -include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50'], seq_platform: 'illumina') -include { STAR_ALIGN as STAR_FOR_STARFUSION } from '../../../../modules/star/align/main.nf' addParams( options: [args: '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' +include { STAR_ALIGN } from '../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_ARRIBA } from '../../../../modules/star/align/main.nf' +include { STAR_ALIGN as STAR_FOR_STARFUSION } from 
'../../../../modules/star/align/main.nf' workflow test_star_alignment_single_end { - input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true) ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_ALIGN ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_fusion { - input = [ [ 
id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = 'illumina' + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_ARRIBA ( input, STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } workflow test_star_alignment_paired_end_for_starfusion { - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) ] - ] + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_rnaseq_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) + star_ignore_sjdbgtf = false + seq_platform = false + seq_center = false + STAR_GENOMEGENERATE ( fasta, gtf ) - STAR_FOR_STARFUSION ( input, STAR_GENOMEGENERATE.out.index, gtf ) + STAR_FOR_STARFUSION ( input, 
STAR_GENOMEGENERATE.out.index, gtf, star_ignore_sjdbgtf, seq_platform, seq_center ) } diff --git a/tests/modules/star/align/nextflow.config b/tests/modules/star/align/nextflow.config new file mode 100644 index 00000000..751f7837 --- /dev/null +++ b/tests/modules/star/align/nextflow.config @@ -0,0 +1,21 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: STAR_GENOMEGENERATE { + ext.args = '--genomeSAindexNbases 9' + } + + withName: STAR_ALIGN { + ext.args = '--readFilesCommand zcat' + } + + withName: STAR_FOR_ARRIBA { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 --outFilterMultimapNmax 50 --peOverlapNbasesMin 10 --alignSplicedMateMapLminOverLmate 0.5 --alignSJstitchMismatchNmax 5 -1 5 5 --chimSegmentMin 10 --chimOutType WithinBAM HardClip --chimJunctionOverhangMin 10 --chimScoreDropMax 30 --chimScoreJunctionNonGTAG 0 --chimScoreSeparation 1 --chimSegmentReadGapMax 3 --chimMultimapNmax 50' + } + + withName: STAR_FOR_STARFUSION { + ext.args = '--readFilesCommand zcat --outSAMtype BAM Unsorted --outReadsUnmapped None --twopassMode Basic --outSAMstrandField intronMotif --outSAMunmapped Within --chimSegmentMin 12 --chimJunctionOverhangMin 8 --chimOutJunctionFormat 1 --alignSJDBoverhangMin 10 --alignMatesGapMax 100000 --alignIntronMax 100000 --alignSJstitchMismatchNmax 5 -1 5 5 --chimMultimapScoreRange 3 --chimScoreJunctionNonGTAG -4 --chimMultimapNmax 20 --chimNonchimScoreDropMin 10 --peOverlapNbasesMin 12 --peOverlapMMp 0.1 --alignInsertionFlush Right --alignSplicedMateMapLminOverLmate 0 --alignSplicedMateMapLmin 30' + } + +} diff --git a/tests/modules/star/align/test.yml b/tests/modules/star/align/test.yml index 47731c5c..af5bebe5 100644 --- a/tests/modules/star/align/test.yml +++ b/tests/modules/star/align/test.yml @@ -1,39 +1,39 @@ - name: star align test_star_alignment_single_end - command: nextflow run 
tests/modules/star/align -entry test_star_alignment_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_single_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: 
output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: b9f5e2f6a624b64c300fe25dc3ac801f @@ -43,41 +43,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 
8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: 38d08f0b944a2a1b981a250d675aa0d9 @@ -87,41 +87,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_fusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_fusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: 
b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: c740d5177067c1fcc48ab7a16cd639d7 @@ -131,41 +131,41 @@ - path: output/star/test.SJ.out.tab - name: star align test_star_alignment_paired_end_for_starfusion - command: nextflow run tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/align -entry test_star_alignment_paired_end_for_starfusion -c ./tests/config/nextflow.config -c ./tests/modules/star/align/nextflow.config tags: - star/align - star files: - - path: output/index/star/Genome + - path: output/star/star/Genome md5sum: a654229fbca6071dcb6b01ce7df704da - - path: output/index/star/Log.out - - path: output/index/star/SA + - path: output/star/star/Log.out + - path: output/star/star/SA 
md5sum: 8c3edc46697b72c9e92440d4cf43506c - - path: output/index/star/SAindex + - path: output/star/star/SAindex md5sum: 2a0c675d8b91d8e5e8c1826d3500482e - - path: output/index/star/chrLength.txt + - path: output/star/star/chrLength.txt md5sum: c81f40f27e72606d7d07097c1d56a5b5 - - path: output/index/star/chrName.txt + - path: output/star/star/chrName.txt md5sum: 5ae68a67b70976ee95342a7451cb5af1 - - path: output/index/star/chrNameLength.txt + - path: output/star/star/chrNameLength.txt md5sum: b190587cae0531f3cf25552d8aa674db - - path: output/index/star/chrStart.txt + - path: output/star/star/chrStart.txt md5sum: 8d3291e6bcdbe9902fbd7c887494173f - - path: output/index/star/exonGeTrInfo.tab + - path: output/star/star/exonGeTrInfo.tab md5sum: d04497f69d6ef889efd4d34fe63edcc4 - - path: output/index/star/exonInfo.tab + - path: output/star/star/exonInfo.tab md5sum: 0d560290fab688b7268d88d5494bf9fe - - path: output/index/star/geneInfo.tab + - path: output/star/star/geneInfo.tab md5sum: 8b608537307443ffaee4927d2b428805 - - path: output/index/star/genomeParameters.txt + - path: output/star/star/genomeParameters.txt md5sum: 3097677f4d8b2cb66770b9e55d343a7f - - path: output/index/star/sjdbInfo.txt + - path: output/star/star/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - - path: output/index/star/sjdbList.fromGTF.out.tab + - path: output/star/star/sjdbList.fromGTF.out.tab md5sum: 8760c33e966dad0b39f440301ebbdee4 - - path: output/index/star/sjdbList.out.tab + - path: output/star/star/sjdbList.out.tab md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1 - - path: output/index/star/transcriptInfo.tab + - path: output/star/star/transcriptInfo.tab md5sum: 0c3a5adb49d15e5feff81db8e29f2e36 - path: output/star/test.Aligned.out.bam md5sum: a1bd1b40950a58ea2776908076160052 diff --git a/tests/modules/star/genomegenerate/main.nf b/tests/modules/star/genomegenerate/main.nf index 7f9e3072..31601478 100644 --- a/tests/modules/star/genomegenerate/main.nf +++ 
b/tests/modules/star/genomegenerate/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' addParams( options: [publish_dir:'star'] ) +include { STAR_GENOMEGENERATE } from '../../../../modules/star/genomegenerate/main.nf' workflow test_star_genomegenerate { fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/star/genomegenerate/nextflow.config b/tests/modules/star/genomegenerate/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/star/genomegenerate/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/star/genomegenerate/test.yml b/tests/modules/star/genomegenerate/test.yml index df8d5efc..0e397009 100644 --- a/tests/modules/star/genomegenerate/test.yml +++ b/tests/modules/star/genomegenerate/test.yml @@ -1,5 +1,5 @@ - name: star genomegenerate test_star_genomegenerate - command: nextflow run tests/modules/star/genomegenerate -entry test_star_genomegenerate -c tests/config/nextflow.config + command: nextflow run ./tests/modules/star/genomegenerate -entry test_star_genomegenerate -c ./tests/config/nextflow.config -c ./tests/modules/star/genomegenerate/nextflow.config tags: - star - star/genomegenerate diff --git a/tests/modules/strelka/germline/main.nf b/tests/modules/strelka/germline/main.nf index 0d5193bb..c50d76e1 100644 --- a/tests/modules/strelka/germline/main.nf +++ b/tests/modules/strelka/germline/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' addParams( options: [:] ) +include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' workflow test_strelka_germline { input = [ diff --git a/tests/modules/strelka/germline/nextflow.config 
b/tests/modules/strelka/germline/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/strelka/germline/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/germline/test.yml b/tests/modules/strelka/germline/test.yml index a3ab3ef6..8db81aa0 100644 --- a/tests/modules/strelka/germline/test.yml +++ b/tests/modules/strelka/germline/test.yml @@ -1,5 +1,5 @@ - name: strelka germline test_strelka_germline - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline @@ -10,7 +10,7 @@ - path: output/strelka/test.variants.vcf.gz.tbi - name: strelka germline test_strelka_germline_target_bed - command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c ./tests/config/nextflow.config -c ./tests/modules/strelka/germline/nextflow.config tags: - strelka - strelka/germline diff --git a/tests/modules/strelka/somatic/main.nf b/tests/modules/strelka/somatic/main.nf index 60127f58..b1d4efeb 100644 --- a/tests/modules/strelka/somatic/main.nf +++ b/tests/modules/strelka/somatic/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' addParams( options: [:] ) +include { STRELKA_SOMATIC } from '../../../../modules/strelka/somatic/main.nf' workflow test_strelka_somatic { diff --git a/tests/modules/strelka/somatic/nextflow.config b/tests/modules/strelka/somatic/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- 
/dev/null +++ b/tests/modules/strelka/somatic/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/strelka/somatic/test.yml b/tests/modules/strelka/somatic/test.yml index b461d335..a56f955a 100644 --- a/tests/modules/strelka/somatic/test.yml +++ b/tests/modules/strelka/somatic/test.yml @@ -1,5 +1,5 @@ - name: strelka somatic test_strelka_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic @@ -12,7 +12,7 @@ md5sum: 4cb176febbc8c26d717a6c6e67b9c905 - name: strelka somatic test_strelka__best_practices_somatic - command: nextflow run tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c tests/config/nextflow.config + command: nextflow run ./tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c ./tests/config/nextflow.config -c ./tests/modules/strelka/somatic/nextflow.config tags: - strelka - strelka/somatic diff --git a/tests/modules/stringtie/merge/main.nf b/tests/modules/stringtie/merge/main.nf index 49ff5a41..7851e755 100644 --- a/tests/modules/stringtie/merge/main.nf +++ b/tests/modules/stringtie/merge/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) -include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' +include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' /* * Test with forward strandedness diff --git a/tests/modules/stringtie/merge/nextflow.config 
b/tests/modules/stringtie/merge/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stringtie/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index e6436612..392a1d7c 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -1,5 +1,5 @@ - name: stringtie merge forward-strand - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_forward_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge @@ -24,7 +24,7 @@ md5sum: 0e42709bfe30c2c7f2574ba664f5fa9f - name: stringtie merge test_stringtie_reverse_merge - command: nextflow run tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config tags: - stringtie - stringtie/merge diff --git a/tests/modules/stringtie/stringtie/main.nf b/tests/modules/stringtie/stringtie/main.nf index b902cc41..ae6abe67 100644 --- a/tests/modules/stringtie/stringtie/main.nf +++ b/tests/modules/stringtie/stringtie/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' addParams( options: [:] ) +include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' // // Test with forward strandedness // diff --git a/tests/modules/stringtie/stringtie/nextflow.config b/tests/modules/stringtie/stringtie/nextflow.config new file mode 100644 index 00000000..8730f1c4 
--- /dev/null +++ b/tests/modules/stringtie/stringtie/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stringtie/stringtie/test.yml b/tests/modules/stringtie/stringtie/test.yml index 28c1b3c2..732b9fd1 100644 --- a/tests/modules/stringtie/stringtie/test.yml +++ b/tests/modules/stringtie/stringtie/test.yml @@ -1,5 +1,5 @@ - name: stringtie stringtie forward - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_forward -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie @@ -21,7 +21,7 @@ md5sum: e981c0038295ae54b63cedb1083f1540 - name: stringtie stringtie reverse - command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/stringtie/stringtie/ -entry test_stringtie_reverse -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/stringtie/nextflow.config tags: - stringtie - stringtie/stringtie diff --git a/tests/modules/subread/featurecounts/main.nf b/tests/modules/subread/featurecounts/main.nf index eae60f80..a8fa5c75 100644 --- a/tests/modules/subread/featurecounts/main.nf +++ b/tests/modules/subread/featurecounts/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' addParams( options: [args:'-t CDS'] ) +include { SUBREAD_FEATURECOUNTS } from '../../../../modules/subread/featurecounts/main.nf' workflow test_subread_featurecounts_forward { diff --git a/tests/modules/subread/featurecounts/nextflow.config b/tests/modules/subread/featurecounts/nextflow.config new file mode 100644 index 00000000..d9fd4fd5 --- /dev/null +++ 
b/tests/modules/subread/featurecounts/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SUBREAD_FEATURECOUNTS { + ext.args = '-t CDS' + } + +} diff --git a/tests/modules/subread/featurecounts/test.yml b/tests/modules/subread/featurecounts/test.yml index be6bed47..7cc24457 100644 --- a/tests/modules/subread/featurecounts/test.yml +++ b/tests/modules/subread/featurecounts/test.yml @@ -1,5 +1,5 @@ - name: subread featurecounts test_subread_featurecounts_forward - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_forward -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -10,7 +10,7 @@ md5sum: 8f602ff9a8ef467af43294e80b367cdf - name: subread featurecounts test_subread_featurecounts_reverse - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_reverse -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts @@ -21,7 +21,7 @@ md5sum: 7cfa30ad678b9bc1bc63afbb0281547b - name: subread featurecounts test_subread_featurecounts_unstranded - command: nextflow run tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c tests/config/nextflow.config + command: nextflow run ./tests/modules/subread/featurecounts -entry test_subread_featurecounts_unstranded -c ./tests/config/nextflow.config -c ./tests/modules/subread/featurecounts/nextflow.config tags: - subread - subread/featurecounts diff --git a/tests/modules/tabix/bgzip/main.nf 
b/tests/modules/tabix/bgzip/main.nf index 8756b17d..4d349890 100644 --- a/tests/modules/tabix/bgzip/main.nf +++ b/tests/modules/tabix/bgzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' addParams( options: [:] ) +include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' workflow test_tabix_bgzip { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/tabix/bgzip/nextflow.config b/tests/modules/tabix/bgzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tabix/bgzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tabix/bgzip/test.yml b/tests/modules/tabix/bgzip/test.yml index 58412979..19357655 100644 --- a/tests/modules/tabix/bgzip/test.yml +++ b/tests/modules/tabix/bgzip/test.yml @@ -1,5 +1,5 @@ - name: tabix bgzip - command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config tags: - tabix - tabix/bgzip diff --git a/tests/modules/tabix/bgziptabix/main.nf b/tests/modules/tabix/bgziptabix/main.nf index 51e242fd..b2ff70d0 100644 --- a/tests/modules/tabix/bgziptabix/main.nf +++ b/tests/modules/tabix/bgziptabix/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' addParams( options: ['args2': '-p vcf'] ) +include { TABIX_BGZIPTABIX } from '../../../../modules/tabix/bgziptabix/main.nf' workflow test_tabix_bgziptabix { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/tabix/bgziptabix/nextflow.config b/tests/modules/tabix/bgziptabix/nextflow.config new file mode 100644 index 00000000..041bfa6a --- /dev/null +++ 
b/tests/modules/tabix/bgziptabix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BGZIPTABIX { + ext.args2 = '-p vcf' + } + +} diff --git a/tests/modules/tabix/bgziptabix/test.yml b/tests/modules/tabix/bgziptabix/test.yml index 31048109..1bcfa88a 100644 --- a/tests/modules/tabix/bgziptabix/test.yml +++ b/tests/modules/tabix/bgziptabix/test.yml @@ -1,5 +1,5 @@ - name: tabix bgziptabix - command: nextflow run ./tests/modules/tabix/bgziptabix -entry test_tabix_bgziptabix -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/bgziptabix -entry test_tabix_bgziptabix -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgziptabix/nextflow.config tags: - tabix - tabix/bgziptabix diff --git a/tests/modules/tabix/tabix/main.nf b/tests/modules/tabix/tabix/main.nf index 0963ffcd..993ee812 100644 --- a/tests/modules/tabix/tabix/main.nf +++ b/tests/modules/tabix/tabix/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p bed'] ) -include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p gff'] ) -include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' addParams( options: ['args': '-p vcf'] ) +include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' workflow test_tabix_tabix_bed { input = [ [ id:'B.bed' ], // meta map diff --git a/tests/modules/tabix/tabix/nextflow.config b/tests/modules/tabix/tabix/nextflow.config new file mode 100644 index 00000000..aa97a873 --- /dev/null +++ b/tests/modules/tabix/tabix/nextflow.config @@ -0,0 +1,17 @@ +process 
{ + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TABIX_BED { + ext.args = '-p bed' + } + + withName: TABIX_GFF { + ext.args = '-p gff' + } + + withName: TABIX_VCF { + ext.args = '-p vcf' + } + +} diff --git a/tests/modules/tabix/tabix/test.yml b/tests/modules/tabix/tabix/test.yml index 646215c8..46be28dd 100644 --- a/tests/modules/tabix/tabix/test.yml +++ b/tests/modules/tabix/tabix/test.yml @@ -1,5 +1,5 @@ - name: tabix tabix bed - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_bed -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -7,7 +7,7 @@ - path: ./output/tabix/test.bed.gz.tbi md5sum: 5b40851ab6b8ccf7946313c86481c0df - name: tabix tabix gff - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_gff -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix @@ -15,7 +15,7 @@ - path: ./output/tabix/genome.gff3.gz.tbi md5sum: f79a67d95a98076e04fbe0455d825926 - name: tabix tabix vcf - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix diff --git a/tests/modules/tbprofiler/profile/main.nf b/tests/modules/tbprofiler/profile/main.nf new file mode 100644 index 00000000..0141a77f --- /dev/null +++ b/tests/modules/tbprofiler/profile/main.nf @@ -0,0 +1,28 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { TBPROFILER_PROFILE } from 
'../../../../modules/tbprofiler/profile/main.nf' + +workflow test_tbprofiler_profile_illumina { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + TBPROFILER_PROFILE ( input ) +} + +workflow test_tbprofiler_profile_nanopore { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) + ] + + TBPROFILER_PROFILE ( input ) +} diff --git a/tests/modules/tbprofiler/profile/nextflow.config b/tests/modules/tbprofiler/profile/nextflow.config new file mode 100644 index 00000000..50cb99c6 --- /dev/null +++ b/tests/modules/tbprofiler/profile/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: TBPROFILER_PROFILE_ILLUMINA { + ext.args = '--platform illumina' + } + + withName: TBPROFILER_PROFILE_NANOPORE { + ext.args = '--platform nanopore' + } + +} diff --git a/tests/modules/tbprofiler/profile/test.yml b/tests/modules/tbprofiler/profile/test.yml new file mode 100644 index 00000000..8b40f1fa --- /dev/null +++ b/tests/modules/tbprofiler/profile/test.yml @@ -0,0 +1,21 @@ +- name: tbprofiler profile illumina + command: nextflow run ./tests/modules/tbprofiler/profile -entry test_tbprofiler_profile_illumina -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz + +- name: tbprofiler profile nanopore + command: nextflow run ./tests/modules/tbprofiler/profile -entry 
test_tbprofiler_profile_nanopore -c ./tests/config/nextflow.config -c ./tests/modules/tbprofiler/profile/nextflow.config + tags: + - tbprofiler + - tbprofiler/profile + files: + - path: output/tbprofiler/bam/test.bam + - path: output/tbprofiler/results/test.results.json + contains: ['genome_positions', 'locus_tag', 'tbprofiler_version'] + - path: output/tbprofiler/vcf/test.targets.csq.vcf.gz diff --git a/tests/modules/tiddit/cov/main.nf b/tests/modules/tiddit/cov/main.nf index aed3516c..1bb35145 100644 --- a/tests/modules/tiddit/cov/main.nf +++ b/tests/modules/tiddit/cov/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' addParams( options: [:] ) +include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf' workflow test_tiddit_cov { diff --git a/tests/modules/tiddit/cov/nextflow.config b/tests/modules/tiddit/cov/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/cov/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tiddit/cov/test.yml b/tests/modules/tiddit/cov/test.yml index c2aa6439..90c4cbb3 100644 --- a/tests/modules/tiddit/cov/test.yml +++ b/tests/modules/tiddit/cov/test.yml @@ -1,5 +1,5 @@ - name: tiddit cov test_tiddit_cov - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov @@ -8,7 +8,7 @@ md5sum: f7974948f809f94879d8a60b726194f5 - name: tiddit cov test_tiddit_cov_no_ref - command: nextflow run tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c 
./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config tags: - tiddit - tiddit/cov diff --git a/tests/modules/tiddit/sv/main.nf b/tests/modules/tiddit/sv/main.nf index 8a5a8140..8dae4950 100644 --- a/tests/modules/tiddit/sv/main.nf +++ b/tests/modules/tiddit/sv/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' addParams( options: [:] ) +include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf' workflow test_tiddit_sv { input = [ diff --git a/tests/modules/tiddit/sv/nextflow.config b/tests/modules/tiddit/sv/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/tiddit/sv/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/tiddit/sv/test.yml b/tests/modules/tiddit/sv/test.yml index ed19bf14..168d21c5 100644 --- a/tests/modules/tiddit/sv/test.yml +++ b/tests/modules/tiddit/sv/test.yml @@ -1,5 +1,5 @@ - name: tiddit sv - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv @@ -11,7 +11,7 @@ - path: output/tiddit/test.vcf - name: tiddit sv no ref - command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c tests/config/nextflow.config + command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config tags: - tiddit - tiddit/sv diff --git a/tests/modules/trimgalore/main.nf b/tests/modules/trimgalore/main.nf index 3001469d..adeda539 100644 --- a/tests/modules/trimgalore/main.nf +++ b/tests/modules/trimgalore/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { TRIMGALORE } 
from '../../../modules/trimgalore/main.nf' addParams( options: [:] ) +include { TRIMGALORE } from '../../../modules/trimgalore/main.nf' // // Test with single-end data diff --git a/tests/modules/trimgalore/nextflow.config b/tests/modules/trimgalore/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/trimgalore/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/trimgalore/test.yml b/tests/modules/trimgalore/test.yml index c176f592..ecbd2b5a 100644 --- a/tests/modules/trimgalore/test.yml +++ b/tests/modules/trimgalore/test.yml @@ -1,5 +1,5 @@ - name: trimgalore single-end - command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_single_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: @@ -9,7 +9,7 @@ - path: ./output/trimgalore/test_trimmed.fq.gz - name: trimgalore paired-end - command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/trimgalore/ -entry test_trimgalore_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/trimgalore/nextflow.config tags: - trimgalore files: diff --git a/tests/modules/ucsc/bed12tobigbed/main.nf b/tests/modules/ucsc/bed12tobigbed/main.nf index 8ed64166..7590fc0e 100644 --- a/tests/modules/ucsc/bed12tobigbed/main.nf +++ b/tests/modules/ucsc/bed12tobigbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' addParams( options: [:] ) +include { UCSC_BED12TOBIGBED } from '../../../../modules/ucsc/bed12tobigbed/main.nf' workflow test_ucsc_bed12tobigbed { input = [ [ id: 'test' ], // meta map 
diff --git a/tests/modules/ucsc/bed12tobigbed/nextflow.config b/tests/modules/ucsc/bed12tobigbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bed12tobigbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bed12tobigbed/test.yml b/tests/modules/ucsc/bed12tobigbed/test.yml index e0ee6f75..6bd4262d 100644 --- a/tests/modules/ucsc/bed12tobigbed/test.yml +++ b/tests/modules/ucsc/bed12tobigbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bed12tobigbed - command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bed12tobigbed -entry test_ucsc_bed12tobigbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bed12tobigbed/nextflow.config tags: - ucsc/bed12tobigbed files: diff --git a/tests/modules/ucsc/bedclip/main.nf b/tests/modules/ucsc/bedclip/main.nf index 162c2eb4..8ccfd3b0 100755 --- a/tests/modules/ucsc/bedclip/main.nf +++ b/tests/modules/ucsc/bedclip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' addParams( options: [suffix:'.clip'] ) +include { UCSC_BEDCLIP } from '../../../../modules/ucsc/bedclip/main.nf' workflow test_ucsc_bedclip { input = [ [ id:'test', single_end:false ], // meta map diff --git a/tests/modules/ucsc/bedclip/nextflow.config b/tests/modules/ucsc/bedclip/nextflow.config new file mode 100644 index 00000000..46af4b0a --- /dev/null +++ b/tests/modules/ucsc/bedclip/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: UCSC_BEDCLIP { + ext.prefix = { "${meta.id}.clip" } + } + +} diff --git a/tests/modules/ucsc/bedclip/test.yml b/tests/modules/ucsc/bedclip/test.yml index 
103795da..bcf22c71 100755 --- a/tests/modules/ucsc/bedclip/test.yml +++ b/tests/modules/ucsc/bedclip/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedclip - command: nextflow run ./tests/modules/ucsc/bedclip -entry test_ucsc_bedclip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedclip -entry test_ucsc_bedclip -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedclip/nextflow.config tags: - ucsc - ucsc/bedclip diff --git a/tests/modules/ucsc/bedgraphtobigwig/main.nf b/tests/modules/ucsc/bedgraphtobigwig/main.nf index 8d83e235..c6db7225 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/main.nf +++ b/tests/modules/ucsc/bedgraphtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_BEDGRAPHTOBIGWIG } from '../../../../modules/ucsc/bedgraphtobigwig/main.nf' workflow test_ucsc_bedgraphtobigwig { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/ucsc/bedgraphtobigwig/nextflow.config b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bedgraphtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bedgraphtobigwig/test.yml b/tests/modules/ucsc/bedgraphtobigwig/test.yml index 726a07ca..c00a0231 100644 --- a/tests/modules/ucsc/bedgraphtobigwig/test.yml +++ b/tests/modules/ucsc/bedgraphtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc bedgraphtobigwig - command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bedgraphtobigwig -entry test_ucsc_bedgraphtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bedgraphtobigwig/nextflow.config tags: - 
ucsc/bedgraphtobigwig files: diff --git a/tests/modules/ucsc/bigwigaverageoverbed/main.nf b/tests/modules/ucsc/bigwigaverageoverbed/main.nf index 9bd5a5e2..3b20dc32 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/main.nf +++ b/tests/modules/ucsc/bigwigaverageoverbed/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' addParams( options: [:] ) +include { UCSC_BIGWIGAVERAGEOVERBED } from '../../../../modules/ucsc/bigwigaverageoverbed/main.nf' workflow test_ucsc_bigwigaverageoverbed { input = [ diff --git a/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/bigwigaverageoverbed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/bigwigaverageoverbed/test.yml b/tests/modules/ucsc/bigwigaverageoverbed/test.yml index 641e9be5..7344c944 100644 --- a/tests/modules/ucsc/bigwigaverageoverbed/test.yml +++ b/tests/modules/ucsc/bigwigaverageoverbed/test.yml @@ -1,5 +1,5 @@ - name: ucsc bigwigaverageoverbed test_ucsc_bigwigaverageoverbed - command: nextflow run tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/bigwigaverageoverbed -entry test_ucsc_bigwigaverageoverbed -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/bigwigaverageoverbed/nextflow.config tags: - ucsc - ucsc/bigwigaverageoverbed diff --git a/tests/modules/ucsc/liftover/main.nf b/tests/modules/ucsc/liftover/main.nf index 9670759a..168193f4 100644 --- a/tests/modules/ucsc/liftover/main.nf +++ b/tests/modules/ucsc/liftover/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_LIFTOVER } from 
'../../../../modules/ucsc/liftover/main.nf' addParams( options: [:] ) +include { UCSC_LIFTOVER } from '../../../../modules/ucsc/liftover/main.nf' workflow test_ucsc_liftover { diff --git a/tests/modules/ucsc/liftover/nextflow.config b/tests/modules/ucsc/liftover/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/liftover/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/liftover/test.yml b/tests/modules/ucsc/liftover/test.yml index 74df6512..c3016189 100644 --- a/tests/modules/ucsc/liftover/test.yml +++ b/tests/modules/ucsc/liftover/test.yml @@ -1,5 +1,5 @@ - name: ucsc liftover test_ucsc_liftover - command: nextflow run tests/modules/ucsc/liftover -entry test_ucsc_liftover -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/liftover -entry test_ucsc_liftover -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/liftover/nextflow.config tags: - ucsc - ucsc/liftover diff --git a/tests/modules/ucsc/wigtobigwig/main.nf b/tests/modules/ucsc/wigtobigwig/main.nf index 81296ac4..614d4150 100644 --- a/tests/modules/ucsc/wigtobigwig/main.nf +++ b/tests/modules/ucsc/wigtobigwig/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' addParams( options: [:] ) +include { UCSC_WIGTOBIGWIG } from '../../../../modules/ucsc/wigtobigwig/main.nf' workflow test_ucsc_wigtobigwig { diff --git a/tests/modules/ucsc/wigtobigwig/nextflow.config b/tests/modules/ucsc/wigtobigwig/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/ucsc/wigtobigwig/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/ucsc/wigtobigwig/test.yml 
b/tests/modules/ucsc/wigtobigwig/test.yml index 15388adb..08d4bce4 100644 --- a/tests/modules/ucsc/wigtobigwig/test.yml +++ b/tests/modules/ucsc/wigtobigwig/test.yml @@ -1,5 +1,5 @@ - name: ucsc wigtobigwig test_ucsc_wigtobigwig - command: nextflow run tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c tests/config/nextflow.config + command: nextflow run ./tests/modules/ucsc/wigtobigwig -entry test_ucsc_wigtobigwig -c ./tests/config/nextflow.config -c ./tests/modules/ucsc/wigtobigwig/nextflow.config tags: - ucsc - ucsc/wigtobigwig diff --git a/tests/modules/ultra/pipeline/main.nf b/tests/modules/ultra/pipeline/main.nf new file mode 100644 index 00000000..483d48fc --- /dev/null +++ b/tests/modules/ultra/pipeline/main.nf @@ -0,0 +1,22 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' +include { GUNZIP } from '../../../../modules/gunzip/main.nf' +include { GFFREAD } from '../../../../modules/gffread/main.nf' + +workflow test_ultra_pipeline { + + input = [ + [ id:'test', single_end:false ], + file(params.test_data['homo_sapiens']['pacbio']['hifi'], checkIfExists: true) + ] + GUNZIP ( input ) + + gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'] , checkIfExists: true) + genome = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + GFFREAD ( gtf ) + + ULTRA_PIPELINE ( GUNZIP.out.gunzip, genome, GFFREAD.out.gtf ) +} diff --git a/tests/modules/ultra/pipeline/nextflow.config b/tests/modules/ultra/pipeline/nextflow.config new file mode 100644 index 00000000..16ed7f9b --- /dev/null +++ b/tests/modules/ultra/pipeline/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: GFFREAD { + ext.args = '--sort-alpha --keep-genes -T' + ext.prefix = { "${meta.id}_sorted" } + } + +} diff --git a/tests/modules/ultra/pipeline/test.yml 
b/tests/modules/ultra/pipeline/test.yml new file mode 100644 index 00000000..d424ba73 --- /dev/null +++ b/tests/modules/ultra/pipeline/test.yml @@ -0,0 +1,12 @@ +- name: ultra pipeline test_ultra_pipeline + command: nextflow run ./tests/modules/ultra/pipeline -entry test_ultra_pipeline -c ./tests/config/nextflow.config -c ./tests/modules/ultra/pipeline/nextflow.config + tags: + - ultra + - ultra/pipeline + files: + - path: output/gffread/genome_sorted.gtf + md5sum: c0b034860c679a354cd093109ed90437 + - path: output/gunzip/test_hifi.fastq + md5sum: 20e41c569d5828c1e87337e13a5185d3 + - path: output/ultra/test.sam + md5sum: a37a1f9594a3099522dc1f6a903b2b12 diff --git a/tests/modules/unicycler/main.nf b/tests/modules/unicycler/main.nf index 993310a1..861b139b 100644 --- a/tests/modules/unicycler/main.nf +++ b/tests/modules/unicycler/main.nf @@ -2,11 +2,12 @@ nextflow.enable.dsl = 2 -include { UNICYCLER } from '../../../modules/unicycler/main.nf' addParams( options: [:] ) +include { UNICYCLER } from '../../../modules/unicycler/main.nf' workflow test_unicycler_single_end { input = [ [ id:'test', single_end:true ], // meta map - [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true) ], + [] ] UNICYCLER ( input ) @@ -14,8 +15,19 @@ workflow test_unicycler_single_end { workflow test_unicycler_paired_end { input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [] + ] + + UNICYCLER ( input ) +} + +workflow test_unicycler_shortreads_longreads { + input = 
[ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + [ file(params.test_data['bacteroides_fragilis']['nanopore']['test_fastq_gz'], checkIfExists: true) ] ] UNICYCLER ( input ) diff --git a/tests/modules/unicycler/nextflow.config b/tests/modules/unicycler/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unicycler/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unicycler/test.yml b/tests/modules/unicycler/test.yml index f12cc1ba..e25845aa 100644 --- a/tests/modules/unicycler/test.yml +++ b/tests/modules/unicycler/test.yml @@ -1,21 +1,32 @@ -- name: unicycler single-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c tests/config/nextflow.config +- name: unicycler test_unicycler_single_end + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_single_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" -- name: unicycler paired-end - command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c tests/config/nextflow.config +- name: unicycler test_unicycler_paired_end + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config tags: - unicycler files: - - path: output/unicycler/test.scaffolds.fa - - path: 
output/unicycler/test.assembly.gfa + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz + - path: output/unicycler/test.unicycler.log + contains: + - "Assembly complete" + +- name: unicycler test_unicycler_shortreads_longreads + command: nextflow run ./tests/modules/unicycler -entry test_unicycler_shortreads_longreads -c ./tests/config/nextflow.config -c ./tests/modules/unicycler/nextflow.config + tags: + - unicycler + files: + - path: output/unicycler/test.assembly.gfa.gz + - path: output/unicycler/test.scaffolds.fa.gz - path: output/unicycler/test.unicycler.log contains: - "Assembly complete" diff --git a/tests/modules/untar/main.nf b/tests/modules/untar/main.nf index b7317bd9..056e3ea7 100644 --- a/tests/modules/untar/main.nf +++ b/tests/modules/untar/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' addParams( options: [:] ) +include { UNTAR } from '../../../modules/untar/main.nf' workflow test_untar { input = file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) diff --git a/tests/modules/untar/nextflow.config b/tests/modules/untar/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/untar/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/untar/test.yml b/tests/modules/untar/test.yml index 9f48e86c..6d0d1d12 100644 --- a/tests/modules/untar/test.yml +++ b/tests/modules/untar/test.yml @@ -1,5 +1,5 @@ - name: untar - command: nextflow run ./tests/modules/untar -entry test_untar -c tests/config/nextflow.config + command: nextflow run ./tests/modules/untar -entry test_untar -c ./tests/config/nextflow.config -c ./tests/modules/untar/nextflow.config tags: - untar files: diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf index b5b208be..520fe31e 100644 
--- a/tests/modules/unzip/main.nf +++ b/tests/modules/unzip/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../modules/unzip/main.nf' addParams( options: [:] ) +include { UNZIP } from '../../../modules/unzip/main.nf' workflow test_unzip { diff --git a/tests/modules/unzip/nextflow.config b/tests/modules/unzip/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/unzip/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/unzip/test.yml b/tests/modules/unzip/test.yml index 1b0b1a97..8016b4fa 100644 --- a/tests/modules/unzip/test.yml +++ b/tests/modules/unzip/test.yml @@ -1,5 +1,5 @@ - name: unzip - command: nextflow run ./tests/modules/unzip -entry test_unzip -c tests/config/nextflow.config + command: nextflow run ./tests/modules/unzip -entry test_unzip -c ./tests/config/nextflow.config -c ./tests/modules/unzip/nextflow.config tags: - unzip files: diff --git a/tests/modules/variantbam/main.nf b/tests/modules/variantbam/main.nf index 3ea09197..016a9104 100644 --- a/tests/modules/variantbam/main.nf +++ b/tests/modules/variantbam/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { VARIANTBAM } from '../../../modules/variantbam/main.nf' addParams( options: [args: '-m 1'] ) +include { VARIANTBAM } from '../../../modules/variantbam/main.nf' workflow test_variantbam { diff --git a/tests/modules/variantbam/nextflow.config b/tests/modules/variantbam/nextflow.config new file mode 100644 index 00000000..d0314010 --- /dev/null +++ b/tests/modules/variantbam/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VARIANTBAM { + ext.args = '-m 1' + } + +} diff --git a/tests/modules/variantbam/test.yml b/tests/modules/variantbam/test.yml index 51b824cd..1c9550ed 100644 --- 
a/tests/modules/variantbam/test.yml +++ b/tests/modules/variantbam/test.yml @@ -1,5 +1,5 @@ - name: variantbam test_variantbam - command: nextflow run tests/modules/variantbam -entry test_variantbam -c tests/config/nextflow.config + command: nextflow run ./tests/modules/variantbam -entry test_variantbam -c ./tests/config/nextflow.config -c ./tests/modules/variantbam/nextflow.config tags: - variantbam files: diff --git a/tests/modules/vcftools/main.nf b/tests/modules/vcftools/main.nf index 2d4997de..21f9aa88 100644 --- a/tests/modules/vcftools/main.nf +++ b/tests/modules/vcftools/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq'] ) -include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' addParams( options: ['args': '--freq --exclude-bed'] ) +include { VCFTOOLS as VCFTOOLS_BASE } from '../../../modules/vcftools/main.nf' +include { VCFTOOLS as VCFTOOLS_OPTIONAL } from '../../../modules/vcftools/main.nf' workflow test_vcftools_vcf_base { input = [ [ id:'test' ], // meta map diff --git a/tests/modules/vcftools/nextflow.config b/tests/modules/vcftools/nextflow.config new file mode 100644 index 00000000..6865bbea --- /dev/null +++ b/tests/modules/vcftools/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VCFTOOLS_BASE { + ext.args = '--freq' + } + + withName: VCFTOOLS_OPTIONAL { + ext.args = '--freq --exclude-bed' + } + +} diff --git a/tests/modules/vcftools/test.yml b/tests/modules/vcftools/test.yml index 81529be2..5314ea75 100644 --- a/tests/modules/vcftools/test.yml +++ b/tests/modules/vcftools/test.yml @@ -1,5 +1,5 @@ - name: vcftools test_vcftools_vcf_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools 
-entry test_vcftools_vcf_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -7,7 +7,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_base - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_base -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_base -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -15,7 +15,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcf_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcf_optional -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcf_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: @@ -23,7 +23,7 @@ md5sum: 7f126655f17268fd1a338734f62868e9 - name: vcftools test_vcftools_vcfgz_optional - command: nextflow run tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c tests/config/nextflow.config + command: nextflow run ./tests/modules/vcftools -entry test_vcftools_vcfgz_optional -c ./tests/config/nextflow.config -c ./tests/modules/vcftools/nextflow.config tags: - vcftools files: diff --git a/tests/modules/yara/index/main.nf b/tests/modules/yara/index/main.nf index 35a86182..89eb0f7d 100644 --- a/tests/modules/yara/index/main.nf +++ b/tests/modules/yara/index/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams( options: [publish_dir:'yara'] ) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' workflow test_yara_index { diff --git a/tests/modules/yara/index/nextflow.config b/tests/modules/yara/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/yara/index/nextflow.config @@ -0,0 +1,5 @@ 
+process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/yara/index/test.yml b/tests/modules/yara/index/test.yml index de6f1cf6..a8d17866 100644 --- a/tests/modules/yara/index/test.yml +++ b/tests/modules/yara/index/test.yml @@ -1,5 +1,5 @@ - name: yara index test_yara_index - command: nextflow run tests/modules/yara/index -entry test_yara_index -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/index -entry test_yara_index -c ./tests/config/nextflow.config -c ./tests/modules/yara/index/nextflow.config tags: - yara/index - yara diff --git a/tests/modules/yara/mapper/main.nf b/tests/modules/yara/mapper/main.nf index 9cdce40d..18800eb3 100644 --- a/tests/modules/yara/mapper/main.nf +++ b/tests/modules/yara/mapper/main.nf @@ -3,15 +3,18 @@ nextflow.enable.dsl = 2 -include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' addParams(options: ['args': '-e 3']) -include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' addParams(options: ['args': '-e 3']) +include { YARA_INDEX } from '../../../../modules/yara/index/main.nf' +include { YARA_MAPPER } from '../../../../modules/yara/mapper/main.nf' workflow test_yara_single_end { + input = [ + [ id:'test', single_end:true ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:true ], // meta map - file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) @@ -19,12 +22,15 @@ workflow test_yara_single_end { workflow test_yara_paired_end { + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + 
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - input = [ [ id:'test', single_end:false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - YARA_INDEX ( fasta ) YARA_MAPPER ( input, YARA_INDEX.out.index ) } diff --git a/tests/modules/yara/mapper/nextflow.config b/tests/modules/yara/mapper/nextflow.config new file mode 100644 index 00000000..a626a8fc --- /dev/null +++ b/tests/modules/yara/mapper/nextflow.config @@ -0,0 +1,13 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: YARA_INDEX { + ext.args = '-e 3' + } + + withName: YARA_MAPPER { + ext.args = '-e 3' + } + +} diff --git a/tests/modules/yara/mapper/test.yml b/tests/modules/yara/mapper/test.yml index 51b056df..186f70b4 100644 --- a/tests/modules/yara/mapper/test.yml +++ b/tests/modules/yara/mapper/test.yml @@ -1,68 +1,68 @@ - name: yara mapper test_yara_single_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_single_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_single_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: output/yara/yara/yara.sa.len md5sum: 
45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: output/yara/yara/yara.fasta md5sum: 6e9fe4042a72f2345f644f239272b7e6 - name: yara mapper test_yara_paired_end - command: nextflow run tests/modules/yara/mapper -entry test_yara_paired_end -c tests/config/nextflow.config + command: nextflow run ./tests/modules/yara/mapper -entry test_yara_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/yara/mapper/nextflow.config tags: - yara/mapper - yara files: - path: output/yara/test_2.mapped.bam - path: output/yara/test_1.mapped.bam - - path: output/index/yara/yara.txt.size + - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - - path: output/index/yara/yara.lf.drs + - path: output/yara/yara/yara.lf.drs md5sum: 55a54008ad1ba589aa210d2629c1df41 - - path: output/index/yara/yara.lf.pst + - path: output/yara/yara/yara.lf.pst md5sum: e8daba34298e99e42942435286f9b3f0 - - path: output/index/yara/yara.sa.len + - path: 
output/yara/yara/yara.sa.len md5sum: 45677f66c28c79c02250ceb8b58645e8 - - path: output/index/yara/yara.rid.concat + - path: output/yara/yara/yara.rid.concat md5sum: 1e4e4c88ddeaf907a12f02f0d88367c5 - - path: output/index/yara/yara.txt.concat + - path: output/yara/yara/yara.txt.concat md5sum: 6074d1933c9e7e5ab05fa0def5ce28c0 - - path: output/index/yara/yara.sa.val + - path: output/yara/yara/yara.sa.val md5sum: ce57cc82e2d3ae7b9824210f54168ce9 - - path: output/index/yara/yara.sa.ind + - path: output/yara/yara/yara.sa.ind md5sum: 464314583efb5f07260b0efecc29a1ce - - path: output/index/yara/yara.rid.limits + - path: output/yara/yara/yara.rid.limits md5sum: 8b814661f30a0c9e350bfbcb454930ce - - path: output/index/yara/yara.lf.drp + - path: output/yara/yara/yara.lf.drp md5sum: 3ef99a87a4e44513f46d42f4261f7842 - - path: output/index/yara/yara.txt.limits + - path: output/yara/yara/yara.txt.limits md5sum: 4480a068db603e4c9a27bc4fa9ceaf14 - - path: output/index/yara/yara.lf.drv + - path: output/yara/yara/yara.lf.drv md5sum: cf6408307fe9fd7f99c33f521bf95550 - - path: output/index/yara/yara.fasta + - path: output/yara/yara/yara.fasta md5sum: 6e9fe4042a72f2345f644f239272b7e6 diff --git a/tests/subworkflows/nf-core/align_bowtie2/test.yml b/tests/subworkflows/nf-core/align_bowtie2/test.yml index 51261a14..beea38c2 100644 --- a/tests/subworkflows/nf-core/align_bowtie2/test.yml +++ b/tests/subworkflows/nf-core/align_bowtie2/test.yml @@ -1,18 +1,19 @@ - name: align bowtie2 single-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_single_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - 
samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log @@ -37,23 +38,24 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: e16eb632f7f462514b0873c7ac8ac905 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 2d837cd72432cd856fca70d33f02ffb5 + md5sum: d9eb909c2cde69d6ae83999a72d770d7 - name: align bowtie2 paired-end command: nextflow run ./tests/subworkflows/nf-core/align_bowtie2 -entry test_align_bowtie2_paired_end -c tests/config/nextflow.config tags: - - subworkflows/align_bowtie2 - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/align_bowtie2 + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules - - bowtie2 - - bowtie2/align - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - bowtie2 + # - bowtie2/align + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/bowtie2/test.bam - path: ./output/bowtie2/test.bowtie2.log @@ -78,4 +80,4 @@ - path: ./output/samtools/test.sorted.bam.idxstats md5sum: 29ff2fa56d35b2a47625b8f517f1a947 - path: ./output/samtools/test.sorted.bam.stats - md5sum: 98aa88a39d26244c89bd4e577953fb48 + md5sum: d0c7a1a4fbd2c1aed437ca419a9e344f diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf new file mode 100644 index 00000000..0f00c62e --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_ENSEMBLVEP } from '../../../../subworkflows/nf-core/annotation_ensemblvep/main' + +workflow 
annotation_ensemblvep { + input = [ + [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_ENSEMBLVEP ( input, "WBcel235", "caenorhabditis_elegans", "104", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config new file mode 100644 index 00000000..4e8d2990 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ENSEMBLVEP { + container = 'nfcore/vep:104.3.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_VEP.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml new file mode 100644 index 00000000..706d9d05 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_ensemblvep/test.yml @@ -0,0 +1,7 @@ +- name: ensemblvep annotation_ensemblvep + command: nextflow run ./tests/subworkflows/nf-core/annotation_ensemblvep -entry annotation_ensemblvep -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_ensemblvep/nextflow.config + tags: + - annotation_ensemblvep + files: + - path: output/annotation/test_VEP.ann.vcf.gz + - path: output/annotation/test_VEP.ann.vcf.gz.tbi diff --git a/tests/subworkflows/nf-core/annotation_snpeff/main.nf b/tests/subworkflows/nf-core/annotation_snpeff/main.nf new file mode 100644 index 00000000..c80197ee --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ANNOTATION_SNPEFF } from '../../../../subworkflows/nf-core/annotation_snpeff/main' + +workflow annotation_snpeff { + input = [ + [ id:'test' ], // meta map + 
file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + ANNOTATION_SNPEFF ( input, "WBcel235.99", [] ) +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config new file mode 100644 index 00000000..be76cb4a --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/nextflow.config @@ -0,0 +1,14 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SNPEFF { + container = 'nfcore/snpeff:5.0.WBcel235' + publishDir = [ enabled: false ] + } + + withName: ANNOTATION_BGZIPTABIX { + ext.prefix = { "${meta.id}_snpEff.ann.vcf" } + } + +} diff --git a/tests/subworkflows/nf-core/annotation_snpeff/test.yml b/tests/subworkflows/nf-core/annotation_snpeff/test.yml new file mode 100644 index 00000000..943b24e9 --- /dev/null +++ b/tests/subworkflows/nf-core/annotation_snpeff/test.yml @@ -0,0 +1,7 @@ +- name: snpeff annotation_snpeff + command: nextflow run ./tests/subworkflows/nf-core/annotation_snpeff -entry annotation_snpeff -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/annotation_snpeff/nextflow.config + tags: + - annotation_snpeff + files: + - path: output/annotation/test_snpEff.ann.vcf.gz + - path: output/annotation/test_snpEff.ann.vcf.gz.tbi diff --git a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml index e2fc27d8..7dc73c80 100644 --- a/tests/subworkflows/nf-core/bam_sort_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_sort_samtools/test.yml @@ -1,18 +1,19 @@ - name: bam sort samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - 
subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: e4c77897d6824ce4df486d1b100618af + md5sum: 8b56bb7d26ced04112f712250d915aaa - path: ./output/samtools/test.sorted.bam.bai md5sum: a70940ce9ba2e700ec2984e0a6526099 # samtools stats @@ -25,18 +26,19 @@ - name: bam sort samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_sort_samtools -entry test_bam_sort_samtools_paired_end -c tests/config/nextflow.config tags: - - subworkflows/bam_sort_samtools - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_sort_samtools + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/index - - samtools/sort - - samtools/stats - - samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/index + # - samtools/sort + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.sorted.bam - md5sum: bbb2db225f140e69a4ac577f74ccc90f + md5sum: 4adc495469724a375d5e1a9f3485e38d - path: ./output/samtools/test.sorted.bam.bai md5sum: 20c91e3a0fd4661d7cb967f40d2486ba # samtools stats diff --git a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml index d93c95a5..2b2e45d1 100644 --- a/tests/subworkflows/nf-core/bam_stats_samtools/test.yml +++ b/tests/subworkflows/nf-core/bam_stats_samtools/test.yml @@ -1,12 +1,13 @@ - name: bam stats samtools single-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_single_end -c tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools + - subworkflows + # - subworkflows/bam_stats_samtools # Modules - - samtools - - samtools/stats - - 
samtools/idxstats - - samtools/flagstat + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.single_end.sorted.bam.flagstat md5sum: 2191911d72575a2358b08b1df64ccb53 @@ -17,12 +18,13 @@ - name: bam stats samtools paired-end command: nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools_paired_end -c tests/config/nextflow.config tags: - - subworkflows/bam_stats_samtools - # Modules - - samtools - - samtools/stats - - samtools/idxstats - - samtools/flagstat + - subworkflows + # - subworkflows/bam_stats_samtools + # # Modules + # - samtools + # - samtools/stats + # - samtools/idxstats + # - samtools/flagstat files: - path: ./output/samtools/test.paired_end.sorted.bam.flagstat md5sum: 4f7ffd1e6a5e85524d443209ac97d783 diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf new file mode 100644 index 00000000..6b02bbc8 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CREATE_UMI_CONSENSUS } from '../../../../subworkflows/nf-core/fgbio_create_umi_consensus/main' + +workflow test_fgbio_create_umi_consensus_mem1 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem" ) +} + +workflow test_fgbio_create_umi_consensus_mem2 { + reads = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], 
checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) + ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + read_structure = "+T 12M11S+T" + + CREATE_UMI_CONSENSUS( reads, fasta, read_structure, "Adjacency", "bwa-mem2" ) +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config new file mode 100644 index 00000000..a55a4213 --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config @@ -0,0 +1,31 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: BWA_MEM { + ext.args = '-p -C -M' + } + + withName: BWAMEM2_MEM { + ext.args = '-p -C -M' + } + + withName: FGBIO_CALLMOLECULARCONSENSUSREADS { + ext.args = '-M 1 -S Coordinate' + ext.prefix = { "${meta.id}_umiconsensus" } + } + + withName: SAMTOOLS_BAM2FQ { + ext.args = '-T RX' + } + + withName: SAMBLASTER { + ext.args = '-M --addMateTags' + ext.prefix = { "${meta.id}_processed" } + } + +} diff --git a/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml new file mode 100644 index 00000000..2db70d3f --- /dev/null +++ b/tests/subworkflows/nf-core/fgbio_create_umi_consensus/test.yml @@ -0,0 +1,22 @@ +- name: fgbio_create_umi_consensus_bwamem1 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem1 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: 
./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: 44f31da850d5a8100b43b629426f2e17 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 +- name: fgbio_create_umi_consensus_bwamem2 + command: nextflow run ./tests/subworkflows/nf-core/fgbio_create_umi_consensus -entry test_fgbio_create_umi_consensus_mem2 -c ./tests/config/nextflow.config -c ./tests/subworkflows/nf-core/fgbio_create_umi_consensus/nextflow.config + tags: + - subworkflows/fgbio_create_umi_consensus_bwamem2 + files: + - path: ./output/fastqtobam/test_umi_converted.bam + md5sum: 9510735554e5eff29244077a72075fb6 + - path: ./output/groupreadsbyumi/test_umi-grouped.bam + md5sum: c69333155038b9a968fd096627d4dfb0 + - path: ./output/callumiconsensus/test_umiconsensus.bam + md5sum: 24b48e3543de0ae7e8a95c116d5ca6a6 diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf new file mode 100644 index 00000000..42427a1f --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK_CREATE_SOM_PON } from '../../../../subworkflows/nf-core/gatk_create_som_pon/main' addParams( [:] ) + +workflow test_gatk_create_som_pon { + ch_mutect2_in = [ + [[ id:'test1' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ], + [[ id:'test2' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] ] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + pon_name = "test_panel" + interval_file = file(params.test_data['homo_sapiens']['genome']['genome_interval_list'], checkIfExists: true) + + GATK_CREATE_SOM_PON ( ch_mutect2_in, fasta, fai, dict, pon_name, interval_file ) +} diff --git a/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml new file mode 100644 index 00000000..63cf64f8 --- /dev/null +++ b/tests/subworkflows/nf-core/gatk_create_som_pon/test.yml @@ -0,0 +1,37 @@ +- name: gatk_create_som_pon + command: nextflow run ./tests/subworkflows/nf-core/gatk_create_som_pon -entry test_gatk_create_som_pon -c tests/config/nextflow.config + tags: + - subworkflows + # - subworkflows/gatk_create_som_pon + # - gatk4 + # Modules + # - gatk4/genomicsdbimport + # - gatk4/createsomaticpanelofnormals + files: + # gatk4 mutect2 + - path: output/gatk4/test1.vcf.gz + - path: output/gatk4/test1.vcf.gz.stats + md5sum: 4f77301a125913170b8e9e7828b4ca3f + - path: output/gatk4/test1.vcf.gz.tbi + - path: output/gatk4/test2.vcf.gz + - path: output/gatk4/test2.vcf.gz.stats + md5sum: 106c5828b02b906c97922618b6072169 + - path: output/gatk4/test2.vcf.gz.tbi + # gatk4 genomicsdbimport + - path: output/gatk4/test_panel/__tiledb_workspace.tdb + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/callset.json + md5sum: 2ab411773b7267de61f8c04939de2a99 + - path: output/gatk4/test_panel/chr22$1$40001/.__consolidation_lock + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/gatk4/test_panel/chr22$1$40001/__array_schema.tdb + - path: output/gatk4/test_panel/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json + md5sum: 2502f79658bc000578ebcfddfc1194c0 + - path: output/gatk4/test_panel/vcfheader.vcf + contains: + - "FORMAT=