From 51bacb00e6e8d97a7f5c1b79a8076bbea59265ed Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Wed, 5 Oct 2022 03:30:53 +0000 Subject: [PATCH 1/5] Template update for nf-core/tools version 2.6 --- .github/workflows/awsfulltest.yml | 4 ++ .github/workflows/awstest.yml | 4 ++ .prettierignore | 1 + CITATION.cff | 8 +-- assets/adaptivecard.json | 67 +++++++++++++++++++ assets/methods_description_template.yml | 25 +++++++ assets/multiqc_config.yml | 6 +- docs/usage.md | 8 +++ lib/NfcoreTemplate.groovy | 55 +++++++++++++++ lib/Utils.groovy | 21 ++++-- lib/WorkflowTaxprofiler.groovy | 19 ++++++ main.nf | 3 +- modules.json | 27 ++++---- .../custom/dumpsoftwareversions/main.nf | 8 +-- .../custom/dumpsoftwareversions/meta.yml | 0 .../templates/dumpsoftwareversions.py | 0 modules/nf-core/{modules => }/fastqc/main.nf | 12 ++++ modules/nf-core/{modules => }/fastqc/meta.yml | 0 modules/nf-core/modules/multiqc/main.nf | 31 --------- modules/nf-core/multiqc/main.nf | 53 +++++++++++++++ .../nf-core/{modules => }/multiqc/meta.yml | 15 +++++ nextflow.config | 5 +- nextflow_schema.json | 18 +++++ workflows/taxprofiler.nf | 26 ++++--- 24 files changed, 345 insertions(+), 71 deletions(-) create mode 100644 assets/adaptivecard.json create mode 100644 assets/methods_description_template.yml mode change 100755 => 100644 lib/Utils.groovy rename modules/nf-core/{modules => }/custom/dumpsoftwareversions/main.nf (79%) rename modules/nf-core/{modules => }/custom/dumpsoftwareversions/meta.yml (100%) rename modules/nf-core/{modules => }/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py (100%) rename modules/nf-core/{modules => }/fastqc/main.nf (85%) rename modules/nf-core/{modules => }/fastqc/meta.yml (100%) delete mode 100644 modules/nf-core/modules/multiqc/main.nf create mode 100644 modules/nf-core/multiqc/main.nf rename modules/nf-core/{modules => }/multiqc/meta.yml (73%) diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml index bcbb910..fe81d27 100644 --- a/.github/workflows/awsfulltest.yml +++ b/.github/workflows/awsfulltest.yml @@ -28,3 +28,7 @@ jobs: "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/taxprofiler/results-${{ github.sha }}" } profiles: test_full,aws_tower + - uses: actions/upload-artifact@v3 + with: + name: Tower debug log file + path: tower_action_*.log diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml index 5d35eea..a580097 100644 --- a/.github/workflows/awstest.yml +++ b/.github/workflows/awstest.yml @@ -23,3 +23,7 @@ jobs: "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/taxprofiler/results-test-${{ github.sha }}" } profiles: test,aws_tower + - uses: actions/upload-artifact@v3 + with: + name: Tower debug log file + path: tower_action_*.log diff --git a/.prettierignore b/.prettierignore index d0e7ae5..eb74a57 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,4 +1,5 @@ email_template.html +adaptivecard.json .nextflow* work/ data/ diff --git a/CITATION.cff b/CITATION.cff index 4533e2f..017666c 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -13,8 +13,8 @@ authors: given-names: Johannes - family-names: Wilm given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime + - family-names: Garcia + given-names: Maxime Ulysse - family-names: Di Tommaso given-names: Paolo - family-names: Nahnsen @@ -39,8 +39,8 @@ prefered-citation: given-names: Johannes - family-names: Wilm given-names: Andreas - - family-names: Ulysse Garcia - given-names: Maxime + - family-names: Garcia + given-names: Maxime Ulysse - family-names: Di Tommaso 
given-names: Paolo - family-names: Nahnsen diff --git a/assets/adaptivecard.json b/assets/adaptivecard.json new file mode 100644 index 0000000..7880ea8 --- /dev/null +++ b/assets/adaptivecard.json @@ -0,0 +1,67 @@ +{ + "type": "message", + "attachments": [ + { + "contentType": "application/vnd.microsoft.card.adaptive", + "contentUrl": null, + "content": { + "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", + "msteams": { + "width": "Full" + }, + "type": "AdaptiveCard", + "version": "1.2", + "body": [ + { + "type": "TextBlock", + "size": "Large", + "weight": "Bolder", + "color": "<% if (success) { %>Good<% } else { %>Attention<%} %>", + "text": "nf-core/taxprofiler v${version} - ${runName}", + "wrap": true + }, + { + "type": "TextBlock", + "spacing": "None", + "text": "Completed at ${dateComplete} (duration: ${duration})", + "isSubtle": true, + "wrap": true + }, + { + "type": "TextBlock", + "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors. The full error message was: ${errorReport}.<% } %>", + "wrap": true + }, + { + "type": "TextBlock", + "text": "The command used to launch the workflow was as follows:", + "wrap": true + }, + { + "type": "TextBlock", + "text": "${commandLine}", + "isSubtle": true, + "wrap": true + } + ], + "actions": [ + { + "type": "Action.ShowCard", + "title": "Pipeline Configuration", + "card": { + "type": "AdaptiveCard", + "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", + "body": [ + { + "type": "FactSet", + "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %> + ] + } + ] + } + } + ] + } + } + ] +} diff --git a/assets/methods_description_template.yml b/assets/methods_description_template.yml new file mode 100644 index 0000000..e4c292f --- /dev/null +++ b/assets/methods_description_template.yml @@ -0,0 +1,25 @@ +id: "nf-core-taxprofiler-methods-description" +description: "Suggested text and references to use when describing pipeline usage within the methods section of a publication." +section_name: "nf-core/taxprofiler Methods Description" +section_href: "https://github.com/nf-core/taxprofiler" +plot_type: "html" +## TODO nf-core: Update the HTML below to your prefered methods description, e.g. add publication citation for this pipeline +## You inject any metadata in the Nextflow '${workflow}' object +data: | +

+  Methods
+
+  Data was processed using nf-core/taxprofiler v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (Ewels et al., 2020).
+
+  The pipeline was executed with Nextflow v${workflow.nextflow.version} (Di Tommaso et al., 2017) with the following command:
+
+  ${workflow.commandLine}
+
+  References
+
+  Notes:
+  ${nodoi_text}
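The template above is filled in at run time from Nextflow's workflow metadata by the new WorkflowTaxprofiler.methodsDescriptionText() helper introduced later in this patch. A minimal Groovy sketch of that rendering step, with an illustrative method name and arguments that are not part of the patch itself:

    import groovy.text.SimpleTemplateEngine

    // Sketch: render a MultiQC methods-description template from workflow metadata,
    // mirroring the WorkflowTaxprofiler.methodsDescriptionText() helper added below.
    // 'run_workflow' stands in for Nextflow's workflow metadata object.
    String renderMethodsDescription(run_workflow, File mqc_methods_yaml) {
        def meta             = [:]
        meta.workflow        = run_workflow.toMap()                 // exposes ${workflow.*} to the template
        meta["manifest_map"] = run_workflow.manifest.toMap()
        meta["doi_text"]     = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
        meta["nodoi_text"]   = meta.manifest_map.doi ? "" : "If available, include the Zenodo DOI of the pipeline version used."

        def engine = new SimpleTemplateEngine()
        return engine.createTemplate(mqc_methods_yaml.text).make(meta).toString()
    }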
diff --git a/assets/multiqc_config.yml b/assets/multiqc_config.yml index b0a0b1c..1db0023 100644 --- a/assets/multiqc_config.yml +++ b/assets/multiqc_config.yml @@ -3,9 +3,11 @@ report_comment: > analysis pipeline. For information about how to interpret these results, please see the documentation. report_section_order: - software_versions: + "nf-core-taxprofiler-methods-description": order: -1000 - "nf-core-taxprofiler-summary": + software_versions: order: -1001 + "nf-core-taxprofiler-summary": + order: -1002 export_plots: true diff --git a/docs/usage.md b/docs/usage.md index f32cbfe..74d3cce 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -237,6 +237,14 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). +## Azure Resource Requests + +To be used with the `azurebatch` profile by specifying the `-profile azurebatch`. +We recommend providing a compute `params.vm_type` of `Standard_D16_v3` VMs by default but these options can be changed if required. + +Note that the choice of VM size depends on your quota and the overall workload during the analysis. +For a thorough list, please refer the [Azure Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes). + ## Running in the background Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished. diff --git a/lib/NfcoreTemplate.groovy b/lib/NfcoreTemplate.groovy index 2fc0a9b..27feb00 100755 --- a/lib/NfcoreTemplate.groovy +++ b/lib/NfcoreTemplate.groovy @@ -145,6 +145,61 @@ class NfcoreTemplate { output_tf.withWriter { w -> w << email_txt } } + // + // Construct and send adaptive card + // https://adaptivecards.io + // + public static void adaptivecard(workflow, params, summary_params, projectDir, log) { + def hook_url = params.hook_url + + def summary = [:] + for (group in summary_params.keySet()) { + summary << summary_params[group] + } + + def misc_fields = [:] + misc_fields['start'] = workflow.start + misc_fields['complete'] = workflow.complete + misc_fields['scriptfile'] = workflow.scriptFile + misc_fields['scriptid'] = workflow.scriptId + if (workflow.repository) misc_fields['repository'] = workflow.repository + if (workflow.commitId) misc_fields['commitid'] = workflow.commitId + if (workflow.revision) misc_fields['revision'] = workflow.revision + misc_fields['nxf_version'] = workflow.nextflow.version + misc_fields['nxf_build'] = workflow.nextflow.build + misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp + + def msg_fields = [:] + msg_fields['version'] = workflow.manifest.version + msg_fields['runName'] = workflow.runName + msg_fields['success'] = workflow.success + msg_fields['dateComplete'] = workflow.complete + msg_fields['duration'] = workflow.duration + msg_fields['exitStatus'] = workflow.exitStatus + msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None') + msg_fields['errorReport'] = (workflow.errorReport ?: 'None') + msg_fields['commandLine'] = workflow.commandLine + msg_fields['projectDir'] = workflow.projectDir + msg_fields['summary'] = summary << misc_fields + + // Render the JSON template + def engine = new groovy.text.GStringTemplateEngine() + def hf = new File("$projectDir/assets/adaptivecard.json") + def json_template = engine.createTemplate(hf).make(msg_fields) + def json_message = 
json_template.toString() + + // POST + def post = new URL(hook_url).openConnection(); + post.setRequestMethod("POST") + post.setDoOutput(true) + post.setRequestProperty("Content-Type", "application/json") + post.getOutputStream().write(json_message.getBytes("UTF-8")); + def postRC = post.getResponseCode(); + if (! postRC.equals(200)) { + log.warn(post.getErrorStream().getText()); + } + } + // // Print pipeline summary on completion // diff --git a/lib/Utils.groovy b/lib/Utils.groovy old mode 100755 new mode 100644 index 28567bd..8d030f4 --- a/lib/Utils.groovy +++ b/lib/Utils.groovy @@ -21,19 +21,26 @@ class Utils { } // Check that all channels are present - def required_channels = ['conda-forge', 'bioconda', 'defaults'] - def conda_check_failed = !required_channels.every { ch -> ch in channels } + // This channel list is ordered by required channel priority. + def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] + def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean // Check that they are in the right order - conda_check_failed |= !(channels.indexOf('conda-forge') < channels.indexOf('bioconda')) - conda_check_failed |= !(channels.indexOf('bioconda') < channels.indexOf('defaults')) + def channel_priority_violation = false + def n = required_channels_in_order.size() + for (int i = 0; i < n - 1; i++) { + channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) + } - if (conda_check_failed) { + if (channels_missing | channel_priority_violation) { log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + " There is a problem with your Conda configuration!\n\n" + " You will need to set-up the conda-forge and bioconda channels correctly.\n" + - " Please refer to https://bioconda.github.io/user/install.html#set-up-channels\n" + - " NB: The order of the channels matters!\n" + + " Please refer to https://bioconda.github.io/\n" + + " The observed channel order is \n" + + " ${channels}\n" + + " but the following channel order is required:\n" + + " ${required_channels_in_order}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" } } diff --git a/lib/WorkflowTaxprofiler.groovy b/lib/WorkflowTaxprofiler.groovy index c82ade9..bbe7c2b 100755 --- a/lib/WorkflowTaxprofiler.groovy +++ b/lib/WorkflowTaxprofiler.groovy @@ -2,6 +2,8 @@ // This file holds several functions specific to the workflow/taxprofiler.nf in the nf-core/taxprofiler pipeline // +import groovy.text.SimpleTemplateEngine + class WorkflowTaxprofiler { // @@ -42,6 +44,23 @@ class WorkflowTaxprofiler { yaml_file_text += "data: |\n" yaml_file_text += "${summary_section}" return yaml_file_text + } + + public static String methodsDescriptionText(run_workflow, mqc_methods_yaml) { + // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file + def meta = [:] + meta.workflow = run_workflow.toMap() + meta["manifest_map"] = run_workflow.manifest.toMap() + + meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" + meta["nodoi_text"] = meta.manifest_map.doi ? "": "
  • If available, make sure to update the text to include the Zenodo DOI of the version of the pipeline used.
  • " + + def methods_text = mqc_methods_yaml.text + + def engine = new SimpleTemplateEngine() + def description_html = engine.createTemplate(methods_text).make(meta) + + return description_html }// // Exit pipeline if incorrect --genome key provided // diff --git a/main.nf b/main.nf index 0e8e9b8..ff1c4ec 100644 --- a/main.nf +++ b/main.nf @@ -4,7 +4,8 @@ nf-core/taxprofiler ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Github : https://github.com/nf-core/taxprofiler -Website: https://nf-co.re/taxprofiler + + Website: https://nf-co.re/taxprofiler Slack : https://nfcore.slack.com/channels/taxprofiler ---------------------------------------------------------------------------------------- */ diff --git a/modules.json b/modules.json index 82a2af9..bcea8f6 100644 --- a/modules.json +++ b/modules.json @@ -2,20 +2,21 @@ "name": "nf-core/taxprofiler", "homePage": "https://github.com/nf-core/taxprofiler", "repos": { - "nf-core/modules": { - "git_url": "https://github.com/nf-core/modules.git", + "https://github.com/nf-core/modules.git": { "modules": { - "custom/dumpsoftwareversions": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", - "branch": "master" - }, - "fastqc": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", - "branch": "master" - }, - "multiqc": { - "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d", - "branch": "master" + "nf-core": { + "custom/dumpsoftwareversions": { + "branch": "master", + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + }, + "fastqc": { + "branch": "master", + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + }, + "multiqc": { + "branch": "master", + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + } } } } diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf similarity index 79% rename from modules/nf-core/modules/custom/dumpsoftwareversions/main.nf rename to modules/nf-core/custom/dumpsoftwareversions/main.nf index 327d510..cebb6e0 100644 --- a/modules/nf-core/modules/custom/dumpsoftwareversions/main.nf +++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -1,11 +1,11 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { - label 'process_low' + label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda (params.enable_conda ? "bioconda::multiqc=1.11" : null) + conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.11--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.11--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" input: path versions diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml b/modules/nf-core/custom/dumpsoftwareversions/meta.yml similarity index 100% rename from modules/nf-core/modules/custom/dumpsoftwareversions/meta.yml rename to modules/nf-core/custom/dumpsoftwareversions/meta.yml diff --git a/modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py similarity index 100% rename from modules/nf-core/modules/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py rename to modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py diff --git a/modules/nf-core/modules/fastqc/main.nf b/modules/nf-core/fastqc/main.nf similarity index 85% rename from modules/nf-core/modules/fastqc/main.nf rename to modules/nf-core/fastqc/main.nf index ed6b8c5..0573036 100644 --- a/modules/nf-core/modules/fastqc/main.nf +++ b/modules/nf-core/fastqc/main.nf @@ -44,4 +44,16 @@ process FASTQC { END_VERSIONS """ } + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.html + touch ${prefix}.zip + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS + """ } diff --git a/modules/nf-core/modules/fastqc/meta.yml b/modules/nf-core/fastqc/meta.yml similarity index 100% rename from modules/nf-core/modules/fastqc/meta.yml rename to modules/nf-core/fastqc/meta.yml diff --git a/modules/nf-core/modules/multiqc/main.nf b/modules/nf-core/modules/multiqc/main.nf deleted file mode 100644 index 1264aac..0000000 --- a/modules/nf-core/modules/multiqc/main.nf +++ /dev/null @@ -1,31 +0,0 @@ -process MULTIQC { - label 'process_medium' - - conda (params.enable_conda ? 'bioconda::multiqc=1.12' : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/multiqc:1.12--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.12--pyhdfd78af_0' }" - - input: - path multiqc_files - - output: - path "*multiqc_report.html", emit: report - path "*_data" , emit: data - path "*_plots" , optional:true, emit: plots - path "versions.yml" , emit: versions - - when: - task.ext.when == null || task.ext.when - - script: - def args = task.ext.args ?: '' - """ - multiqc -f $args . - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) - END_VERSIONS - """ -} diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf new file mode 100644 index 0000000..a8159a5 --- /dev/null +++ b/modules/nf-core/multiqc/main.nf @@ -0,0 +1,53 @@ +process MULTIQC { + label 'process_single' + + conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" + + input: + path multiqc_files, stageAs: "?/*" + path(multiqc_config) + path(extra_multiqc_config) + path(multiqc_logo) + + output: + path "*multiqc_report.html", emit: report + path "*_data" , emit: data + path "*_plots" , optional:true, emit: plots + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def config = multiqc_config ? "--config $multiqc_config" : '' + def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' + """ + multiqc \\ + --force \\ + $args \\ + $config \\ + $extra_config \\ + . + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS + """ + + stub: + """ + touch multiqc_data + touch multiqc_plots + touch multiqc_report.html + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS + """ +} diff --git a/modules/nf-core/modules/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml similarity index 73% rename from modules/nf-core/modules/multiqc/meta.yml rename to modules/nf-core/multiqc/meta.yml index 6fa891e..ebc29b2 100644 --- a/modules/nf-core/modules/multiqc/meta.yml +++ b/modules/nf-core/multiqc/meta.yml @@ -12,11 +12,25 @@ tools: homepage: https://multiqc.info/ documentation: https://multiqc.info/docs/ licence: ["GPL-3.0-or-later"] + input: - multiqc_files: type: file description: | List of reports / files recognised by MultiQC, for example the html and zip output of FastQC + - multiqc_config: + type: file + description: Optional config yml for MultiQC + pattern: "*.{yml,yaml}" + - extra_multiqc_config: + type: file + description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. + pattern: "*.{yml,yaml}" + - multiqc_logo: + type: file + description: Optional logo file for MultiQC + pattern: "*.{png}" + output: - report: type: file @@ -38,3 +52,4 @@ authors: - "@abhi18av" - "@bunop" - "@drpatelh" + - "@jfy133" diff --git a/nextflow.config b/nextflow.config index 7acbbc0..4afe2f5 100644 --- a/nextflow.config +++ b/nextflow.config @@ -21,7 +21,9 @@ params { // MultiQC options multiqc_config = null multiqc_title = null + multiqc_logo = null max_multiqc_email_size = '25.MB' + multiqc_methods_description = null // Boilerplate options outdir = null @@ -31,6 +33,7 @@ params { email_on_fail = null plaintext_email = false monochrome_logs = false + hook_url = null help = false validate_params = true show_hidden_params = false @@ -74,7 +77,6 @@ try { // } - profiles { debug { process.beforeScript = 'echo $HOSTNAME' } conda { @@ -189,6 +191,7 @@ manifest { mainScript = 'main.nf' nextflowVersion = '!>=21.10.3' version = '1.0dev' + doi = '' } // Load modules.config for DSL2 module specific options diff --git a/nextflow_schema.json b/nextflow_schema.json index a8289a8..da76c3f 100644 --- a/nextflow_schema.json +++ b/nextflow_schema.json @@ -213,12 +213,30 @@ "fa_icon": "fas fa-palette", "hidden": true }, + "hook_url": { + "type": "string", + "description": "Incoming hook URL for messaging service", + "fa_icon": "fas fa-people-group", + "help_text": "Incoming hook URL for messaging service. 
Currently, only MS Teams is supported.", + "hidden": true + }, "multiqc_config": { "type": "string", "description": "Custom config file to supply to MultiQC.", "fa_icon": "fas fa-cog", "hidden": true }, + "multiqc_logo": { + "type": "string", + "description": "Custom logo file to supply to MultiQC. File name must also be set in the MultiQC config file", + "fa_icon": "fas fa-image", + "hidden": true + }, + "multiqc_methods_description": { + "type": "string", + "description": "Custom MultiQC yaml file containing HTML including a methods description.", + "fa_icon": "fas fa-cog" + }, "tracedir": { "type": "string", "description": "Directory to keep pipeline Nextflow logs and reports.", diff --git a/workflows/taxprofiler.nf b/workflows/taxprofiler.nf index 1e85e7f..1167cea 100644 --- a/workflows/taxprofiler.nf +++ b/workflows/taxprofiler.nf @@ -23,8 +23,10 @@ if (params.input) { ch_input = file(params.input) } else { exit 1, 'Input sample ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ -ch_multiqc_config = file("$projectDir/assets/multiqc_config.yml", checkIfExists: true) -ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config) : Channel.empty() +ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) +ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config, checkIfExists: true ) : Channel.empty() +ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo, checkIfExists: true ) : Channel.empty() +ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -46,9 +48,9 @@ include { INPUT_CHECK } from '../subworkflows/local/input_check' // // MODULE: Installed directly from nf-core/modules // -include { FASTQC } from '../modules/nf-core/modules/fastqc/main' -include { MULTIQC } from '../modules/nf-core/modules/multiqc/main' -include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/modules/custom/dumpsoftwareversions/main' +include { FASTQC } from '../modules/nf-core/fastqc/main' +include { MULTIQC } from '../modules/nf-core/multiqc/main' +include { CUSTOM_DUMPSOFTWAREVERSIONS } from '../modules/nf-core/custom/dumpsoftwareversions/main' /* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -89,15 +91,20 @@ workflow TAXPROFILER { workflow_summary = WorkflowTaxprofiler.paramsSummaryMultiqc(workflow, summary_params) ch_workflow_summary = Channel.value(workflow_summary) + methods_description = WorkflowTaxprofiler.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description) + ch_methods_description = Channel.value(methods_description) + ch_multiqc_files = Channel.empty() - ch_multiqc_files = ch_multiqc_files.mix(Channel.from(ch_multiqc_config)) - ch_multiqc_files = ch_multiqc_files.mix(ch_multiqc_custom_config.collect().ifEmpty([])) ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) + ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml')) ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect()) ch_multiqc_files = 
ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}.ifEmpty([])) MULTIQC ( - ch_multiqc_files.collect() + ch_multiqc_files.collect(), + ch_multiqc_config.collect().ifEmpty([]), + ch_multiqc_custom_config.collect().ifEmpty([]), + ch_multiqc_logo.collect().ifEmpty([]) ) multiqc_report = MULTIQC.out.report.toList() ch_versions = ch_versions.mix(MULTIQC.out.versions) @@ -114,6 +121,9 @@ workflow.onComplete { NfcoreTemplate.email(workflow, params, summary_params, projectDir, log, multiqc_report) } NfcoreTemplate.summary(workflow, params, log) + if (params.hook_url) { + NfcoreTemplate.adaptivecard(workflow, params, summary_params, projectDir, log) + } } /* From b407fbcdce27960e07e1f73233a8039d64574572 Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Wed, 5 Oct 2022 12:24:18 +0200 Subject: [PATCH 2/5] chore: force update all modules --- modules/nf-core/cat/fastq/main.nf | 10 +++---- modules/nf-core/centrifuge/kreport/main.nf | 2 +- .../templates/dumpsoftwareversions.py | 11 +++---- modules/nf-core/eido/convert/main.nf | 2 +- modules/nf-core/eido/validate/main.nf | 2 +- modules/nf-core/gunzip/main.nf | 2 +- modules/nf-core/kaiju/kaiju2krona/main.nf | 2 +- modules/nf-core/kaiju/kaiju2table/main.nf | 2 +- .../krakentools/combinekreports/main.nf | 2 +- .../nf-core/krakentools/kreport2krona/main.nf | 2 +- .../nf-core/krona/ktimporttaxonomy/main.nf | 2 +- modules/nf-core/krona/ktimporttext/main.nf | 2 +- modules/nf-core/megan/rma2info/main.nf | 2 +- modules/nf-core/minimap2/index/main.nf | 6 ++-- modules/nf-core/minimap2/index/meta.yml | 10 +++++++ modules/nf-core/motus/merge/main.nf | 2 +- modules/nf-core/samtools/view/main.nf | 26 ++++++++++------ modules/nf-core/samtools/view/meta.yml | 30 ++++++++++++++++--- modules/nf-core/untar/main.nf | 2 +- 19 files changed, 80 insertions(+), 39 deletions(-) diff --git a/modules/nf-core/cat/fastq/main.nf b/modules/nf-core/cat/fastq/main.nf index d275f19..4fa365d 100644 --- a/modules/nf-core/cat/fastq/main.nf +++ b/modules/nf-core/cat/fastq/main.nf @@ -1,6 +1,6 @@ process CAT_FASTQ { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "conda-forge::sed=4.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? @@ -20,9 +20,9 @@ process CAT_FASTQ { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def readList = reads.collect{ it.toString() } + def readList = reads instanceof List ? reads.collect{ it.toString() } : [reads.toString()] if (meta.single_end) { - if (readList.size > 1) { + if (readList.size >= 1) { """ cat ${readList.join(' ')} > ${prefix}.merged.fastq.gz @@ -33,7 +33,7 @@ process CAT_FASTQ { """ } } else { - if (readList.size > 2) { + if (readList.size >= 2) { def read1 = [] def read2 = [] readList.eachWithIndex{ v, ix -> ( ix & 1 ? read2 : read1 ) << v } @@ -51,7 +51,7 @@ process CAT_FASTQ { stub: def prefix = task.ext.prefix ?: "${meta.id}" - def readList = reads.collect{ it.toString() } + def readList = reads instanceof List ? reads.collect{ it.toString() } : [reads.toString()] if (meta.single_end) { if (readList.size > 1) { """ diff --git a/modules/nf-core/centrifuge/kreport/main.nf b/modules/nf-core/centrifuge/kreport/main.nf index 381ddd6..8e5b741 100644 --- a/modules/nf-core/centrifuge/kreport/main.nf +++ b/modules/nf-core/centrifuge/kreport/main.nf @@ -1,6 +1,6 @@ process CENTRIFUGE_KREPORT { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? 
"bioconda::centrifuge=1.0.4_beta" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py index d139039..7c2abfa 100644 --- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py +++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py @@ -58,11 +58,12 @@ versions_by_module = {} for process, process_versions in versions_by_process.items(): module = process.split(":")[-1] try: - assert versions_by_module[module] == process_versions, ( - "We assume that software versions are the same between all modules. " - "If you see this error-message it means you discovered an edge-case " - "and should open an issue in nf-core/tools. " - ) + if versions_by_module[module] != process_versions: + raise AssertionError( + "We assume that software versions are the same between all modules. " + "If you see this error-message it means you discovered an edge-case " + "and should open an issue in nf-core/tools. " + ) except KeyError: versions_by_module[module] = process_versions diff --git a/modules/nf-core/eido/convert/main.nf b/modules/nf-core/eido/convert/main.nf index be4c02f..7ec4b8c 100644 --- a/modules/nf-core/eido/convert/main.nf +++ b/modules/nf-core/eido/convert/main.nf @@ -1,5 +1,5 @@ process EIDO_CONVERT { - tag '$samplesheet' + tag "$samplesheet" label 'process_single' conda (params.enable_conda ? "conda-forge::eido=0.1.9" : null) diff --git a/modules/nf-core/eido/validate/main.nf b/modules/nf-core/eido/validate/main.nf index e564e83..798d3a0 100644 --- a/modules/nf-core/eido/validate/main.nf +++ b/modules/nf-core/eido/validate/main.nf @@ -1,5 +1,5 @@ process EIDO_VALIDATE { - tag '$samplesheet' + tag "$samplesheet" label 'process_single' conda (params.enable_conda ? "conda-forge::eido=0.1.9" : null) diff --git a/modules/nf-core/gunzip/main.nf b/modules/nf-core/gunzip/main.nf index 7036704..fa6ba26 100644 --- a/modules/nf-core/gunzip/main.nf +++ b/modules/nf-core/gunzip/main.nf @@ -1,6 +1,6 @@ process GUNZIP { tag "$archive" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "conda-forge::sed=4.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/kaiju/kaiju2krona/main.nf b/modules/nf-core/kaiju/kaiju2krona/main.nf index c95d5a7..3f35ce7 100644 --- a/modules/nf-core/kaiju/kaiju2krona/main.nf +++ b/modules/nf-core/kaiju/kaiju2krona/main.nf @@ -1,6 +1,6 @@ process KAIJU_KAIJU2KRONA { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/kaiju/kaiju2table/main.nf b/modules/nf-core/kaiju/kaiju2table/main.nf index 00739d1..8648d56 100644 --- a/modules/nf-core/kaiju/kaiju2table/main.nf +++ b/modules/nf-core/kaiju/kaiju2table/main.nf @@ -1,6 +1,6 @@ process KAIJU_KAIJU2TABLE { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
diff --git a/modules/nf-core/krakentools/combinekreports/main.nf b/modules/nf-core/krakentools/combinekreports/main.nf index 849f39c..fdcc245 100644 --- a/modules/nf-core/krakentools/combinekreports/main.nf +++ b/modules/nf-core/krakentools/combinekreports/main.nf @@ -1,5 +1,5 @@ process KRAKENTOOLS_COMBINEKREPORTS { - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::krakentools=1.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/krakentools/kreport2krona/main.nf b/modules/nf-core/krakentools/kreport2krona/main.nf index 1fcb1e4..8ed46f1 100644 --- a/modules/nf-core/krakentools/kreport2krona/main.nf +++ b/modules/nf-core/krakentools/kreport2krona/main.nf @@ -1,6 +1,6 @@ process KRAKENTOOLS_KREPORT2KRONA { tag "$meta.id" - label 'process_low' + label 'process_single' // WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions. conda (params.enable_conda ? "bioconda::krakentools=1.2" : null) diff --git a/modules/nf-core/krona/ktimporttaxonomy/main.nf b/modules/nf-core/krona/ktimporttaxonomy/main.nf index 9b03462..79c01d7 100644 --- a/modules/nf-core/krona/ktimporttaxonomy/main.nf +++ b/modules/nf-core/krona/ktimporttaxonomy/main.nf @@ -1,6 +1,6 @@ process KRONA_KTIMPORTTAXONOMY { tag "${meta.id}" - label 'process_high' + label 'process_single' // WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions. conda (params.enable_conda ? "bioconda::krona=2.8" : null) diff --git a/modules/nf-core/krona/ktimporttext/main.nf b/modules/nf-core/krona/ktimporttext/main.nf index de0cfc2..edf7aab 100644 --- a/modules/nf-core/krona/ktimporttext/main.nf +++ b/modules/nf-core/krona/ktimporttext/main.nf @@ -1,6 +1,6 @@ process KRONA_KTIMPORTTEXT { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::krona=2.8.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/megan/rma2info/main.nf b/modules/nf-core/megan/rma2info/main.nf index 80d1975..9c6a094 100644 --- a/modules/nf-core/megan/rma2info/main.nf +++ b/modules/nf-core/megan/rma2info/main.nf @@ -1,6 +1,6 @@ process MEGAN_RMA2INFO { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::megan=6.21.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
diff --git a/modules/nf-core/minimap2/index/main.nf b/modules/nf-core/minimap2/index/main.nf index 3dfeb86..25e9429 100644 --- a/modules/nf-core/minimap2/index/main.nf +++ b/modules/nf-core/minimap2/index/main.nf @@ -7,11 +7,11 @@ process MINIMAP2_INDEX { 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: - path fasta + tuple val(meta), path(fasta) output: - path "*.mmi" , emit: index - path "versions.yml" , emit: versions + tuple val(meta), path("*.mmi"), emit: index + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/modules/nf-core/minimap2/index/meta.yml b/modules/nf-core/minimap2/index/meta.yml index 3bf9f04..603c651 100644 --- a/modules/nf-core/minimap2/index/meta.yml +++ b/modules/nf-core/minimap2/index/meta.yml @@ -12,11 +12,21 @@ tools: documentation: https://github.com/lh3/minimap2#uguide licence: ["MIT"] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - fasta: type: file description: | Reference database in FASTA format. output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - mmi: type: file description: Minimap2 fasta index. diff --git a/modules/nf-core/motus/merge/main.nf b/modules/nf-core/motus/merge/main.nf index a050c7a..d9f091e 100644 --- a/modules/nf-core/motus/merge/main.nf +++ b/modules/nf-core/motus/merge/main.nf @@ -2,7 +2,7 @@ VERSION = '3.0.1' process MOTUS_MERGE { tag "$meta.id" - label 'process_low' + label 'process_single' conda (params.enable_conda ? "bioconda::motus=3.0.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? diff --git a/modules/nf-core/samtools/view/main.nf b/modules/nf-core/samtools/view/main.nf index 59ded5c..94da5d6 100644 --- a/modules/nf-core/samtools/view/main.nf +++ b/modules/nf-core/samtools/view/main.nf @@ -10,31 +10,39 @@ process SAMTOOLS_VIEW { input: tuple val(meta), path(input), path(index) path fasta + path qname output: - tuple val(meta), path("*.bam") , emit: bam , optional: true - tuple val(meta), path("*.cram"), emit: cram, optional: true - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam"), emit: bam, optional: true + tuple val(meta), path("*.cram"), emit: cram, optional: true + tuple val(meta), path("*.sam"), emit: sam, optional: true + tuple val(meta), path("*.bai"), emit: bai, optional: true + tuple val(meta), path("*.csi"), emit: csi, optional: true + tuple val(meta), path("*.crai"), emit: crai, optional: true + path "versions.yml", emit: versions when: task.ext.when == null || task.ext.when script: def args = task.ext.args ?: '' - def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def reference = fasta ? "--reference ${fasta} -C" : "" - def file_type = input.getExtension() + def reference = fasta ? "--reference ${fasta}" : "" + def readnames = qname ? "--qname-file ${qname}": "" + def file_type = args.contains("--output-fmt sam") ? "sam" : + args.contains("--output-fmt bam") ? "bam" : + args.contains("--output-fmt cram") ? "cram" : + input.getExtension() if ("$input" == "${prefix}.${file_type}") error "Input and output names are the same, use \"task.ext.prefix\" to disambiguate!" 
""" samtools \\ view \\ --threads ${task.cpus-1} \\ ${reference} \\ + ${readnames} \\ $args \\ - $input \\ - $args2 \\ - > ${prefix}.${file_type} + -o ${prefix}.${file_type} \\ + $input cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/nf-core/samtools/view/meta.yml b/modules/nf-core/samtools/view/meta.yml index a8b43ec..a52e4f8 100644 --- a/modules/nf-core/samtools/view/meta.yml +++ b/modules/nf-core/samtools/view/meta.yml @@ -33,6 +33,10 @@ input: type: optional file description: Reference file the CRAM was created with pattern: "*.{fasta,fa}" + - qname: + type: file + description: Optional file with read names to output only select alignments + pattern: "*.{txt,list}" output: - meta: type: map @@ -41,12 +45,29 @@ output: e.g. [ id:'test', single_end:false ] - bam: type: file - description: filtered/converted BAM/SAM file - pattern: "*.{bam,sam}" + description: optional filtered/converted BAM file + pattern: "*.{bam}" - cram: type: file - description: filtered/converted CRAM file - pattern: "*.cram" + description: optional filtered/converted CRAM file + pattern: "*.{cram}" + - sam: + type: file + description: optional filtered/converted SAM file + pattern: "*.{sam}" + # bai, csi, and crai are created with `--write-index` + - bai: + type: file + description: optional BAM file index + pattern: "*.{bai}" + - csi: + type: file + description: optional tabix BAM file index + pattern: "*.{csi}" + - crai: + type: file + description: optional CRAM file index + pattern: "*.{crai}" - versions: type: file description: File containing software versions @@ -55,3 +76,4 @@ authors: - "@drpatelh" - "@joseespinosa" - "@FriederikeHanssen" + - "@priyanka-surana" diff --git a/modules/nf-core/untar/main.nf b/modules/nf-core/untar/main.nf index 007871b..71eea7b 100644 --- a/modules/nf-core/untar/main.nf +++ b/modules/nf-core/untar/main.nf @@ -27,7 +27,7 @@ process UNTAR { ## Ensures --strip-components only applied when top level of tar contents is a directory ## If just files or multiple directories, place all in output - if [[ \$(tar -tzf ${archive} | grep "/\$" | wc -l) -eq 1 ]]; then + if [[ \$(tar -tzf ${archive} | grep -o -P "^.*?\\/" | uniq | wc -l) -eq 1 ]]; then tar \\ -C output --strip-components 1 \\ -xzvf \\ From f5af9fd939667de6754d0647762eae8d89e0e156 Mon Sep 17 00:00:00 2001 From: "Moritz E. 
Beber" Date: Wed, 5 Oct 2022 13:40:43 +0200 Subject: [PATCH 3/5] fix: adjust import paths to re-organization --- subworkflows/local/db_check.nf | 2 +- subworkflows/local/input_check.nf | 4 ++-- subworkflows/local/longread_hostremoval.nf | 8 ++++---- subworkflows/local/longread_preprocessing.nf | 6 +++--- subworkflows/local/profiling.nf | 18 +++++++++--------- subworkflows/local/shortread_adapterremoval.nf | 6 +++--- .../local/shortread_complexityfiltering.nf | 4 ++-- subworkflows/local/shortread_fastp.nf | 4 ++-- subworkflows/local/shortread_hostremoval.nf | 4 ++-- subworkflows/local/shortread_preprocessing.nf | 2 +- subworkflows/local/standardisation_profiles.nf | 12 ++++++------ subworkflows/local/visualization_krona.nf | 12 ++++++------ 12 files changed, 41 insertions(+), 41 deletions(-) diff --git a/subworkflows/local/db_check.nf b/subworkflows/local/db_check.nf index 7b440c6..5d0c4eb 100644 --- a/subworkflows/local/db_check.nf +++ b/subworkflows/local/db_check.nf @@ -3,7 +3,7 @@ // include { DATABASE_CHECK } from '../../modules/local/database_check' -include { UNTAR } from '../../modules/nf-core/modules/untar/main' +include { UNTAR } from '../../modules/nf-core/untar/main' workflow DB_CHECK { take: diff --git a/subworkflows/local/input_check.nf b/subworkflows/local/input_check.nf index e8d5e7a..46baff2 100644 --- a/subworkflows/local/input_check.nf +++ b/subworkflows/local/input_check.nf @@ -2,8 +2,8 @@ // Check input samplesheet and get read channels // -include { EIDO_VALIDATE } from '../../modules/nf-core/modules/eido/validate/main' -include { EIDO_CONVERT } from '../../modules/nf-core/modules/eido/convert/main' +include { EIDO_VALIDATE } from '../../modules/nf-core/eido/validate/main' +include { EIDO_CONVERT } from '../../modules/nf-core/eido/convert/main' workflow INPUT_CHECK { take: diff --git a/subworkflows/local/longread_hostremoval.nf b/subworkflows/local/longread_hostremoval.nf index 7db020b..5b4413f 100644 --- a/subworkflows/local/longread_hostremoval.nf +++ b/subworkflows/local/longread_hostremoval.nf @@ -2,10 +2,10 @@ // Remove host reads via alignment and export off-target reads // -include { MINIMAP2_INDEX } from '../../modules/nf-core/modules/minimap2/index/main' -include { MINIMAP2_ALIGN } from '../../modules/nf-core/modules/minimap2/align/main' -include { SAMTOOLS_VIEW } from '../../modules/nf-core/modules/samtools/view/main' -include { SAMTOOLS_BAM2FQ } from '../../modules/nf-core/modules/samtools/bam2fq/main' +include { MINIMAP2_INDEX } from '../../modules/nf-core/minimap2/index/main' +include { MINIMAP2_ALIGN } from '../../modules/nf-core/minimap2/align/main' +include { SAMTOOLS_VIEW } from '../../modules/nf-core/samtools/view/main' +include { SAMTOOLS_BAM2FQ } from '../../modules/nf-core/samtools/bam2fq/main' workflow LONGREAD_HOSTREMOVAL { take: diff --git a/subworkflows/local/longread_preprocessing.nf b/subworkflows/local/longread_preprocessing.nf index c04207e..ce537e8 100644 --- a/subworkflows/local/longread_preprocessing.nf +++ b/subworkflows/local/longread_preprocessing.nf @@ -2,9 +2,9 @@ // Process long raw reads with porechop // -include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/modules/fastqc/main' -include { PORECHOP } from '../../modules/nf-core/modules/porechop/main' -include { FILTLONG } from '../../modules/nf-core/modules/filtlong/main' +include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/fastqc/main' +include { PORECHOP } from '../../modules/nf-core/porechop/main' +include { FILTLONG } from 
'../../modules/nf-core/filtlong/main' workflow LONGREAD_PREPROCESSING { take: diff --git a/subworkflows/local/profiling.nf b/subworkflows/local/profiling.nf index c2ef508..11c4a72 100644 --- a/subworkflows/local/profiling.nf +++ b/subworkflows/local/profiling.nf @@ -2,15 +2,15 @@ // Run profiling // -include { MALT_RUN } from '../../modules/nf-core/modules/malt/run/main' -include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_TSV } from '../../modules/nf-core/modules/megan/rma2info/main' -include { KRAKEN2_KRAKEN2 } from '../../modules/nf-core/modules/kraken2/kraken2/main' -include { CENTRIFUGE_CENTRIFUGE } from '../../modules/nf-core/modules/centrifuge/centrifuge/main' -include { CENTRIFUGE_KREPORT } from '../../modules/nf-core/modules/centrifuge/kreport/main' -include { METAPHLAN3_METAPHLAN3 } from '../../modules/nf-core/modules/metaphlan3/metaphlan3/main' -include { KAIJU_KAIJU } from '../../modules/nf-core/modules/kaiju/kaiju/main' -include { DIAMOND_BLASTX } from '../../modules/nf-core/modules/diamond/blastx/main' -include { MOTUS_PROFILE } from '../../modules/nf-core/modules/motus/profile/main' +include { MALT_RUN } from '../../modules/nf-core/malt/run/main' +include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_TSV } from '../../modules/nf-core/megan/rma2info/main' +include { KRAKEN2_KRAKEN2 } from '../../modules/nf-core/kraken2/kraken2/main' +include { CENTRIFUGE_CENTRIFUGE } from '../../modules/nf-core/centrifuge/centrifuge/main' +include { CENTRIFUGE_KREPORT } from '../../modules/nf-core/centrifuge/kreport/main' +include { METAPHLAN3_METAPHLAN3 } from '../../modules/nf-core/metaphlan3/metaphlan3/main' +include { KAIJU_KAIJU } from '../../modules/nf-core/kaiju/kaiju/main' +include { DIAMOND_BLASTX } from '../../modules/nf-core/diamond/blastx/main' +include { MOTUS_PROFILE } from '../../modules/nf-core/motus/profile/main' workflow PROFILING { take: diff --git a/subworkflows/local/shortread_adapterremoval.nf b/subworkflows/local/shortread_adapterremoval.nf index e491423..a5a43fe 100644 --- a/subworkflows/local/shortread_adapterremoval.nf +++ b/subworkflows/local/shortread_adapterremoval.nf @@ -2,9 +2,9 @@ // Process short raw reads with AdapterRemoval // -include { ADAPTERREMOVAL as ADAPTERREMOVAL_SINGLE } from '../../modules/nf-core/modules/adapterremoval/main' -include { ADAPTERREMOVAL as ADAPTERREMOVAL_PAIRED } from '../../modules/nf-core/modules/adapterremoval/main' -include { CAT_FASTQ } from '../../modules/nf-core/modules/cat/fastq/main' +include { ADAPTERREMOVAL as ADAPTERREMOVAL_SINGLE } from '../../modules/nf-core/adapterremoval/main' +include { ADAPTERREMOVAL as ADAPTERREMOVAL_PAIRED } from '../../modules/nf-core/adapterremoval/main' +include { CAT_FASTQ } from '../../modules/nf-core/cat/fastq/main' workflow SHORTREAD_ADAPTERREMOVAL { diff --git a/subworkflows/local/shortread_complexityfiltering.nf b/subworkflows/local/shortread_complexityfiltering.nf index a34440d..844cd15 100644 --- a/subworkflows/local/shortread_complexityfiltering.nf +++ b/subworkflows/local/shortread_complexityfiltering.nf @@ -2,8 +2,8 @@ // Check input samplesheet and get read channels // -include { BBMAP_BBDUK } from '../../modules/nf-core/modules/bbmap/bbduk/main' -include { PRINSEQPLUSPLUS } from '../../modules/nf-core/modules/prinseqplusplus/main' +include { BBMAP_BBDUK } from '../../modules/nf-core/bbmap/bbduk/main' +include { PRINSEQPLUSPLUS } from '../../modules/nf-core/prinseqplusplus/main' workflow SHORTREAD_COMPLEXITYFILTERING { take: diff --git a/subworkflows/local/shortread_fastp.nf 
b/subworkflows/local/shortread_fastp.nf index 05e0f3d..d466041 100644 --- a/subworkflows/local/shortread_fastp.nf +++ b/subworkflows/local/shortread_fastp.nf @@ -2,8 +2,8 @@ // Process short raw reads with FastP // -include { FASTP as FASTP_SINGLE } from '../../modules/nf-core/modules/fastp/main' -include { FASTP as FASTP_PAIRED } from '../../modules/nf-core/modules/fastp/main' +include { FASTP as FASTP_SINGLE } from '../../modules/nf-core/fastp/main' +include { FASTP as FASTP_PAIRED } from '../../modules/nf-core/fastp/main' workflow SHORTREAD_FASTP { take: diff --git a/subworkflows/local/shortread_hostremoval.nf b/subworkflows/local/shortread_hostremoval.nf index c5f15c7..d181a34 100644 --- a/subworkflows/local/shortread_hostremoval.nf +++ b/subworkflows/local/shortread_hostremoval.nf @@ -2,8 +2,8 @@ // Remove host reads via alignment and export off-target reads // -include { BOWTIE2_BUILD } from '../../modules/nf-core/modules/bowtie2/build/main' -include { BOWTIE2_ALIGN } from '../../modules/nf-core/modules/bowtie2/align/main' +include { BOWTIE2_BUILD } from '../../modules/nf-core/bowtie2/build/main' +include { BOWTIE2_ALIGN } from '../../modules/nf-core/bowtie2/align/main' workflow SHORTREAD_HOSTREMOVAL { take: diff --git a/subworkflows/local/shortread_preprocessing.nf b/subworkflows/local/shortread_preprocessing.nf index 977a317..859c1d5 100644 --- a/subworkflows/local/shortread_preprocessing.nf +++ b/subworkflows/local/shortread_preprocessing.nf @@ -5,7 +5,7 @@ include { SHORTREAD_FASTP } from './shortread_fastp' include { SHORTREAD_ADAPTERREMOVAL } from './shortread_adapterremoval' -include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/modules/fastqc/main' +include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/fastqc/main' workflow SHORTREAD_PREPROCESSING { take: diff --git a/subworkflows/local/standardisation_profiles.nf b/subworkflows/local/standardisation_profiles.nf index 7a441ff..cbb0fab 100644 --- a/subworkflows/local/standardisation_profiles.nf +++ b/subworkflows/local/standardisation_profiles.nf @@ -2,11 +2,11 @@ // Standardise output files e.g. 
aggregation // -include { KAIJU_KAIJU2TABLE } from '../../modules/nf-core/modules/kaiju/kaiju2table/main' -include { KRAKENTOOLS_COMBINEKREPORTS } from '../../modules/nf-core/modules/krakentools/combinekreports/main' -include { KRAKENTOOLS_COMBINEKREPORTS as KRAKENTOOLS_COMBINEKREPORTS_CENTRIFUGE } from '../../modules/nf-core/modules/krakentools/combinekreports/main' -include { METAPHLAN3_MERGEMETAPHLANTABLES } from '../../modules/nf-core/modules/metaphlan3/mergemetaphlantables/main' -include { MOTUS_MERGE } from '../../modules/nf-core/modules/motus/merge/main' +include { KAIJU_KAIJU2TABLE } from '../../modules/nf-core/kaiju/kaiju2table/main' +include { KRAKENTOOLS_COMBINEKREPORTS } from '../../modules/nf-core/krakentools/combinekreports/main' +include { KRAKENTOOLS_COMBINEKREPORTS as KRAKENTOOLS_COMBINEKREPORTS_CENTRIFUGE } from '../../modules/nf-core/krakentools/combinekreports/main' +include { METAPHLAN3_MERGEMETAPHLANTABLES } from '../../modules/nf-core/metaphlan3/mergemetaphlantables/main' +include { MOTUS_MERGE } from '../../modules/nf-core/motus/merge/main' workflow STANDARDISATION_PROFILES { take: @@ -99,7 +99,7 @@ workflow STANDARDISATION_PROFILES { ch_versions = ch_versions.mix( KRAKENTOOLS_COMBINEKREPORTS.out.versions ) // MetaPhlAn3 - + ch_profiles_for_metaphlan3 = ch_input_profiles.metaphlan3 .map { [it[0]['db_name'], it[1]] } .groupTuple() diff --git a/subworkflows/local/visualization_krona.nf b/subworkflows/local/visualization_krona.nf index f06768a..7d8e4f0 100644 --- a/subworkflows/local/visualization_krona.nf +++ b/subworkflows/local/visualization_krona.nf @@ -2,13 +2,13 @@ // Create Krona visualizations // -include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_KRONA } from '../../modules/nf-core/modules/megan/rma2info/main' -include { KAIJU_KAIJU2KRONA } from '../../modules/nf-core/modules/kaiju/kaiju2krona/main' -include { KRAKENTOOLS_KREPORT2KRONA } from '../../modules/nf-core/modules/krakentools/kreport2krona/main' +include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_KRONA } from '../../modules/nf-core/megan/rma2info/main' +include { KAIJU_KAIJU2KRONA } from '../../modules/nf-core/kaiju/kaiju2krona/main' +include { KRAKENTOOLS_KREPORT2KRONA } from '../../modules/nf-core/krakentools/kreport2krona/main' include { KRONA_CLEANUP } from '../../modules/local/krona_cleanup' -include { KRONA_KTIMPORTTEXT } from '../../modules/nf-core/modules/krona/ktimporttext/main' -include { KRONA_KTIMPORTTAXONOMY } from '../../modules/nf-core/modules/krona/ktimporttaxonomy/main' -include { GUNZIP } from '../../modules/nf-core/modules/gunzip/main' +include { KRONA_KTIMPORTTEXT } from '../../modules/nf-core/krona/ktimporttext/main' +include { KRONA_KTIMPORTTAXONOMY } from '../../modules/nf-core/krona/ktimporttaxonomy/main' +include { GUNZIP } from '../../modules/nf-core/gunzip/main' workflow VISUALIZATION_KRONA { take: From 180cac0395d82d5e91049445c41d26b64caee46f Mon Sep 17 00:00:00 2001 From: "Moritz E. 
Beber" Date: Wed, 5 Oct 2022 13:49:35 +0200 Subject: [PATCH 4/5] fix: adapt to new samtools/view input --- subworkflows/local/longread_hostremoval.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/local/longread_hostremoval.nf b/subworkflows/local/longread_hostremoval.nf index 5b4413f..751f4e1 100644 --- a/subworkflows/local/longread_hostremoval.nf +++ b/subworkflows/local/longread_hostremoval.nf @@ -33,7 +33,7 @@ workflow LONGREAD_HOSTREMOVAL { } - SAMTOOLS_VIEW ( ch_minimap2_mapped , [] ) + SAMTOOLS_VIEW ( ch_minimap2_mapped , [], [] ) ch_versions = ch_versions.mix( SAMTOOLS_VIEW.out.versions.first() ) SAMTOOLS_BAM2FQ ( SAMTOOLS_VIEW.out.bam, false ) From 5f3dbd28dea341618a6246f2bee960d9621de34b Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Wed, 5 Oct 2022 13:51:15 +0200 Subject: [PATCH 5/5] fix: patch inputs to correspond to previous version --- modules.json | 3 ++- modules/nf-core/minimap2/index/main.nf | 6 +++--- .../minimap2/index/minimap2-index.diff | 20 +++++++++++++++++++ 3 files changed, 25 insertions(+), 4 deletions(-) create mode 100644 modules/nf-core/minimap2/index/minimap2-index.diff diff --git a/modules.json b/modules.json index 10d6c74..e9af665 100644 --- a/modules.json +++ b/modules.json @@ -119,7 +119,8 @@ }, "minimap2/index": { "branch": "master", - "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905" + "git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905", + "patch": "modules/nf-core/minimap2/index/minimap2-index.diff" }, "motus/merge": { "branch": "master", diff --git a/modules/nf-core/minimap2/index/main.nf b/modules/nf-core/minimap2/index/main.nf index 25e9429..3dfeb86 100644 --- a/modules/nf-core/minimap2/index/main.nf +++ b/modules/nf-core/minimap2/index/main.nf @@ -7,11 +7,11 @@ process MINIMAP2_INDEX { 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" input: - tuple val(meta), path(fasta) + path fasta output: - tuple val(meta), path("*.mmi"), emit: index - path "versions.yml" , emit: versions + path "*.mmi" , emit: index + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/modules/nf-core/minimap2/index/minimap2-index.diff b/modules/nf-core/minimap2/index/minimap2-index.diff new file mode 100644 index 0000000..de306d5 --- /dev/null +++ b/modules/nf-core/minimap2/index/minimap2-index.diff @@ -0,0 +1,20 @@ +Changes in module 'nf-core/minimap2/index' +--- modules/nf-core/minimap2/index/main.nf ++++ modules/nf-core/minimap2/index/main.nf +@@ -7,11 +7,11 @@ + 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" + + input: +- tuple val(meta), path(fasta) ++ path fasta + + output: +- tuple val(meta), path("*.mmi"), emit: index +- path "versions.yml" , emit: versions ++ path "*.mmi" , emit: index ++ path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + +************************************************************