
Merge pull request #138 from nf-core/nf-core-template-merge-2.6

Important! Template update for nf-core/tools v2.6
Author: James A. Fellows Yates, 2022-10-06 13:03:45 +02:00 (committed via GitHub)
Commit: ff1670169a (GPG key ID 4AEE18F83AFDEB23; no known key found for this signature in database)
104 changed files with 552 additions and 259 deletions

.github/workflows/awsfulltest.yml

@@ -28,3 +28,7 @@ jobs:
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/taxprofiler/results-${{ github.sha }}"
}
profiles: test_full,aws_tower
- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
path: tower_action_*.log

.github/workflows/awstest.yml

@@ -23,3 +23,7 @@ jobs:
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/taxprofiler/results-test-${{ github.sha }}"
}
profiles: test,aws_tower
- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
path: tower_action_*.log

.prettierignore

@@ -1,4 +1,5 @@
email_template.html
adaptivecard.json
.nextflow*
work/
data/

CITATION.cff

@@ -13,8 +13,8 @@ authors:
given-names: Johannes
- family-names: Wilm
given-names: Andreas
- family-names: Ulysse Garcia
given-names: Maxime
- family-names: Garcia
given-names: Maxime Ulysse
- family-names: Di Tommaso
given-names: Paolo
- family-names: Nahnsen
@@ -39,8 +39,8 @@ prefered-citation:
given-names: Johannes
- family-names: Wilm
given-names: Andreas
- family-names: Ulysse Garcia
given-names: Maxime
- family-names: Garcia
given-names: Maxime Ulysse
- family-names: Di Tommaso
given-names: Paolo
- family-names: Nahnsen

assets/adaptivecard.json (new file, 67 lines)

@@ -0,0 +1,67 @@
{
"type": "message",
"attachments": [
{
"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": null,
"content": {
"\$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"msteams": {
"width": "Full"
},
"type": "AdaptiveCard",
"version": "1.2",
"body": [
{
"type": "TextBlock",
"size": "Large",
"weight": "Bolder",
"color": "<% if (success) { %>Good<% } else { %>Attention<%} %>",
"text": "nf-core/taxprofiler v${version} - ${runName}",
"wrap": true
},
{
"type": "TextBlock",
"spacing": "None",
"text": "Completed at ${dateComplete} (duration: ${duration})",
"isSubtle": true,
"wrap": true
},
{
"type": "TextBlock",
"text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors. The full error message was: ${errorReport}.<% } %>",
"wrap": true
},
{
"type": "TextBlock",
"text": "The command used to launch the workflow was as follows:",
"wrap": true
},
{
"type": "TextBlock",
"text": "${commandLine}",
"isSubtle": true,
"wrap": true
}
],
"actions": [
{
"type": "Action.ShowCard",
"title": "Pipeline Configuration",
"card": {
"type": "AdaptiveCard",
"\$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"body": [
{
"type": "FactSet",
"facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %>
]
}
]
}
}
]
}
}
]
}

assets/methods_description_template.yml (new file)

@@ -0,0 +1,25 @@
id: "nf-core-taxprofiler-methods-description"
description: "Suggested text and references to use when describing pipeline usage within the methods section of a publication."
section_name: "nf-core/taxprofiler Methods Description"
section_href: "https://github.com/nf-core/taxprofiler"
plot_type: "html"
## TODO nf-core: Update the HTML below to your prefered methods description, e.g. add publication citation for this pipeline
## You inject any metadata in the Nextflow '${workflow}' object
data: |
<h4>Methods</h4>
<p>Data was processed using nf-core/taxprofiler v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (<a href="https://doi.org/10.1038/s41587-020-0439-x">Ewels <em>et al.</em>, 2020</a>).</p>
<p>The pipeline was executed with Nextflow v${workflow.nextflow.version} (<a href="https://doi.org/10.1038/nbt.3820">Di Tommaso <em>et al.</em>, 2017</a>) with the following command:</p>
<pre><code>${workflow.commandLine}</code></pre>
<h4>References</h4>
<ul>
<li>Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. <a href="https://doi.org/10.1038/nbt.3820">https://doi.org/10.1038/nbt.3820</a></li>
<li>Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. <a href="https://doi.org/10.1038/s41587-020-0439-x">https://doi.org/10.1038/s41587-020-0439-x</a></li>
</ul>
<div class="alert alert-info">
<h5>Notes:</h5>
<ul>
${nodoi_text}
<li>The command above does not include parameters contained in any configs or profiles that may have been used. Ensure the config file is also uploaded with your publication!</li>
<li>You should also cite all software used within this run. Check the "Software Versions" of this report to get version information.</li>
</ul>
</div>

assets/multiqc_config.yml

@@ -3,10 +3,12 @@ report_comment: >
analysis pipeline. For information about how to interpret these results, please see the
<a href="https://nf-co.re/taxprofiler" target="_blank">documentation</a>.
report_section_order:
software_versions:
"nf-core-taxprofiler-methods-description":
order: -1000
"nf-core-taxprofiler-summary":
software_versions:
order: -1001
"nf-core-taxprofiler-summary":
order: -1002
export_plots: true

docs/usage.md

@@ -400,6 +400,14 @@ See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config
If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs).
## Azure Resource Requests
To be used with the `azurebatch` profile by specifying the `-profile azurebatch`.
We recommend providing a compute `params.vm_type` of `Standard_D16_v3` VMs by default but these options can be changed if required.
Note that the choice of VM size depends on your quota and the overall workload during the analysis.
For a thorough list, please refer the [Azure Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes).
## Running in the background
Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished.

lib/NfcoreTemplate.groovy

@@ -145,6 +145,61 @@ class NfcoreTemplate {
output_tf.withWriter { w -> w << email_txt }
}
//
// Construct and send adaptive card
// https://adaptivecards.io
//
public static void adaptivecard(workflow, params, summary_params, projectDir, log) {
def hook_url = params.hook_url
def summary = [:]
for (group in summary_params.keySet()) {
summary << summary_params[group]
}
def misc_fields = [:]
misc_fields['start'] = workflow.start
misc_fields['complete'] = workflow.complete
misc_fields['scriptfile'] = workflow.scriptFile
misc_fields['scriptid'] = workflow.scriptId
if (workflow.repository) misc_fields['repository'] = workflow.repository
if (workflow.commitId) misc_fields['commitid'] = workflow.commitId
if (workflow.revision) misc_fields['revision'] = workflow.revision
misc_fields['nxf_version'] = workflow.nextflow.version
misc_fields['nxf_build'] = workflow.nextflow.build
misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
def msg_fields = [:]
msg_fields['version'] = workflow.manifest.version
msg_fields['runName'] = workflow.runName
msg_fields['success'] = workflow.success
msg_fields['dateComplete'] = workflow.complete
msg_fields['duration'] = workflow.duration
msg_fields['exitStatus'] = workflow.exitStatus
msg_fields['errorMessage'] = (workflow.errorMessage ?: 'None')
msg_fields['errorReport'] = (workflow.errorReport ?: 'None')
msg_fields['commandLine'] = workflow.commandLine
msg_fields['projectDir'] = workflow.projectDir
msg_fields['summary'] = summary << misc_fields
// Render the JSON template
def engine = new groovy.text.GStringTemplateEngine()
def hf = new File("$projectDir/assets/adaptivecard.json")
def json_template = engine.createTemplate(hf).make(msg_fields)
def json_message = json_template.toString()
// POST
def post = new URL(hook_url).openConnection();
post.setRequestMethod("POST")
post.setDoOutput(true)
post.setRequestProperty("Content-Type", "application/json")
post.getOutputStream().write(json_message.getBytes("UTF-8"));
def postRC = post.getResponseCode();
if (! postRC.equals(200)) {
log.warn(post.getErrorStream().getText());
}
}
//
// Print pipeline summary on completion
//

lib/Utils.groovy (Executable file → Normal file, 21 lines changed)

@@ -21,19 +21,26 @@ class Utils {
}
// Check that all channels are present
def required_channels = ['conda-forge', 'bioconda', 'defaults']
def conda_check_failed = !required_channels.every { ch -> ch in channels }
// This channel list is ordered by required channel priority.
def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults']
def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean
// Check that they are in the right order
conda_check_failed |= !(channels.indexOf('conda-forge') < channels.indexOf('bioconda'))
conda_check_failed |= !(channels.indexOf('bioconda') < channels.indexOf('defaults'))
def channel_priority_violation = false
def n = required_channels_in_order.size()
for (int i = 0; i < n - 1; i++) {
channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1]))
}
if (conda_check_failed) {
if (channels_missing | channel_priority_violation) {
log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
" There is a problem with your Conda configuration!\n\n" +
" You will need to set-up the conda-forge and bioconda channels correctly.\n" +
" Please refer to https://bioconda.github.io/user/install.html#set-up-channels\n" +
" NB: The order of the channels matters!\n" +
" Please refer to https://bioconda.github.io/\n" +
" The observed channel order is \n" +
" ${channels}\n" +
" but the following channel order is required:\n" +
" ${required_channels_in_order}\n" +
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
}
}

lib/WorkflowTaxprofiler.groovy

@@ -2,6 +2,8 @@
// This file holds several functions specific to the workflow/taxprofiler.nf in the nf-core/taxprofiler pipeline
//
import groovy.text.SimpleTemplateEngine
class WorkflowTaxprofiler {
//
@@ -42,6 +44,23 @@ class WorkflowTaxprofiler
yaml_file_text += "data: |\n"
yaml_file_text += "${summary_section}"
return yaml_file_text
}
public static String methodsDescriptionText(run_workflow, mqc_methods_yaml) {
// Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
def meta = [:]
meta.workflow = run_workflow.toMap()
meta["manifest_map"] = run_workflow.manifest.toMap()
meta["doi_text"] = meta.manifest_map.doi ? "(doi: <a href=\'https://doi.org/${meta.manifest_map.doi}\'>${meta.manifest_map.doi}</a>)" : ""
meta["nodoi_text"] = meta.manifest_map.doi ? "": "<li>If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. </li>"
def methods_text = mqc_methods_yaml.text
def engine = new SimpleTemplateEngine()
def description_html = engine.createTemplate(methods_text).make(meta)
return description_html
}
//
// Exit pipeline if incorrect --genome key provided
//

main.nf

@@ -4,7 +4,8 @@
nf-core/taxprofiler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Github : https://github.com/nf-core/taxprofiler
Website: https://nf-co.re/taxprofiler
Website: https://nf-co.re/taxprofiler
Slack : https://nfcore.slack.com/channels/taxprofiler
----------------------------------------------------------------------------------------
*/

modules.json

@@ -2,156 +2,158 @@
"name": "nf-core/taxprofiler",
"homePage": "https://github.com/nf-core/taxprofiler",
"repos": {
"nf-core/modules": {
"git_url": "https://github.com/nf-core/modules.git",
"https://github.com/nf-core/modules.git": {
"modules": {
"nf-core": {
"adapterremoval": {
"branch": "master",
"git_sha": "879d42c5e28661fe0a5e744c9e2c515868f9e08a"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"bbmap/bbduk": {
"branch": "master",
"git_sha": "848ee9a215d02d80be033bfa60881700f2bd914c"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"bowtie2/align": {
"branch": "master",
"git_sha": "848ee9a215d02d80be033bfa60881700f2bd914c"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"bowtie2/build": {
"branch": "master",
"git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"cat/fastq": {
"branch": "master",
"git_sha": "b034029b59b1198075da8019074bc02051a6100e"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"centrifuge/centrifuge": {
"branch": "master",
"git_sha": "d2726fcf75063960f06b36d2229a4c0966614108"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"centrifuge/kreport": {
"branch": "master",
"git_sha": "734d0db6079a4aa43b6509b207e5d6feb35d4838"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"custom/dumpsoftwareversions": {
"branch": "master",
"git_sha": "5e7b1ef9a5a2d9258635bcbf70fcf37dacd1b247"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"diamond/blastx": {
"branch": "master",
"git_sha": "3531824af826c16cd252bc5aa82ae169b244ebaa"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"eido/convert": {
"branch": "master",
"git_sha": "9764eef361ded86e9242075bda64c2662421386a"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"eido/validate": {
"branch": "master",
"git_sha": "38383cfaefc06cd35e25de99989a3e6ab9ed2980"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"fastp": {
"branch": "master",
"git_sha": "2c70c1c1951aaf884d2e8d8d9c871db79f7b35aa"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"fastqc": {
"branch": "master",
"git_sha": "49b18b1639f4f7104187058866a8fab33332bdfe"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"filtlong": {
"branch": "master",
"git_sha": "957cb9b83668075f4af101fc99502908cca487e3"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"gunzip": {
"branch": "master",
"git_sha": "fa37e0662690c4ec4260dae282fbce08777503e6"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"kaiju/kaiju": {
"branch": "master",
"git_sha": "8856f127c58f6af479128be8b8df4d42e442ddbe"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"kaiju/kaiju2krona": {
"branch": "master",
"git_sha": "2f0b19240430de6807b1232e6d9d0e8084e8a28f"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"kaiju/kaiju2table": {
"branch": "master",
"git_sha": "538dbac98ba9c8f799536cd5a617195501439457"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"kraken2/kraken2": {
"branch": "master",
"git_sha": "409a308ba46284d8ebb48c2c1befd6f6433db3f7"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"krakentools/combinekreports": {
"branch": "master",
"git_sha": "ee0346b4d14ffdc15ce7e093ca1363cd07c9bd78"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"krakentools/kreport2krona": {
"branch": "master",
"git_sha": "233fa70811a03a4cecb2ece483b5c8396e2cee1d"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"krona/ktimporttaxonomy": {
"branch": "master",
"git_sha": "0e9fd9370ad1845870b8a9c63fcc47d999a1739e"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"krona/ktimporttext": {
"branch": "master",
"git_sha": "cdefbec66999c0b49d8bfeea9d6f9d19056635a2"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"malt/run": {
"branch": "master",
"git_sha": "be8d7b3293cac26cc63e4dbfb364deb8ed6ec7e5"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"megan/rma2info": {
"branch": "master",
"git_sha": "2d38566eca4cc15142b2ffa7c11837569b39aece"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"metaphlan3/mergemetaphlantables": {
"branch": "master",
"git_sha": "36bcd675ae76a379a38165898a203f4915823f4f"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"metaphlan3/metaphlan3": {
"branch": "master",
"git_sha": "978087354eb72ac1f6e18a3f790fad9bc4d05840"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"minimap2/align": {
"branch": "master",
"git_sha": "1a5a9e7b4009dcf34e6867dd1a5a1d9a718b027b"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"minimap2/index": {
"branch": "master",
"git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905",
"patch": "modules/nf-core/minimap2/index/minimap2-index.diff"
},
"motus/merge": {
"branch": "master",
"git_sha": "54ff289487244bf15543ecfa62bd4df49be72b73"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"motus/profile": {
"branch": "master",
"git_sha": "b6ed584443ad68ac41e6975994139454a4f23c18"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"multiqc": {
"branch": "master",
"git_sha": "16eee433b87b303bda650131ac5a0b1ad725e166"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"porechop": {
"branch": "master",
"git_sha": "b78e19b9dae3671db2c7d4346fe04452c1debfab"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"prinseqplusplus": {
"branch": "master",
"git_sha": "f1c5384c31e985591716afdd732cf8c2ae29d05b"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"samtools/bam2fq": {
"branch": "master",
"git_sha": "5510ea39fe638594bc26ac34cadf4a84bf27d159"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"samtools/view": {
"branch": "master",
"git_sha": "5e7b1ef9a5a2d9258635bcbf70fcf37dacd1b247"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
},
"untar": {
"branch": "master",
"git_sha": "393dbd6ddafe3f18eac02893dd4a21e4d45de679"
"git_sha": "5e34754d42cd2d5d248ca8673c0a53cdf5624905"
}
}
}
}

modules/nf-core/cat/fastq/main.nf

@@ -1,6 +1,6 @@
process CAT_FASTQ {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "conda-forge::sed=4.7" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
@@ -20,9 +20,9 @@ process CAT_FASTQ {
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def readList = reads.collect{ it.toString() }
def readList = reads instanceof List ? reads.collect{ it.toString() } : [reads.toString()]
if (meta.single_end) {
if (readList.size > 1) {
if (readList.size >= 1) {
"""
cat ${readList.join(' ')} > ${prefix}.merged.fastq.gz
@@ -33,7 +33,7 @@
"""
}
} else {
if (readList.size > 2) {
if (readList.size >= 2) {
def read1 = []
def read2 = []
readList.eachWithIndex{ v, ix -> ( ix & 1 ? read2 : read1 ) << v }
@@ -51,7 +51,7 @@
stub:
def prefix = task.ext.prefix ?: "${meta.id}"
def readList = reads.collect{ it.toString() }
def readList = reads instanceof List ? reads.collect{ it.toString() } : [reads.toString()]
if (meta.single_end) {
if (readList.size > 1) {
"""

modules/nf-core/centrifuge/kreport/main.nf

@@ -1,6 +1,6 @@
process CENTRIFUGE_KREPORT {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::centrifuge=1.0.4_beta" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/custom/dumpsoftwareversions/main.nf

@@ -2,10 +2,10 @@ process CUSTOM_DUMPSOFTWAREVERSIONS {
label 'process_single'
// Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
conda (params.enable_conda ? 'bioconda::multiqc=1.13a' : null)
conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/multiqc:1.13a--pyhdfd78af_1' :
'quay.io/biocontainers/multiqc:1.13a--pyhdfd78af_1' }"
'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' :
'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }"
input:
path versions

modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py

@@ -58,7 +58,8 @@ versions_by_module = {}
for process, process_versions in versions_by_process.items():
module = process.split(":")[-1]
try:
assert versions_by_module[module] == process_versions, (
if versions_by_module[module] != process_versions:
raise AssertionError(
"We assume that software versions are the same between all modules. "
"If you see this error-message it means you discovered an edge-case "
"and should open an issue in nf-core/tools. "

modules/nf-core/eido/convert/main.nf

@@ -1,5 +1,5 @@
process EIDO_CONVERT {
tag '$samplesheet'
tag "$samplesheet"
label 'process_single'
conda (params.enable_conda ? "conda-forge::eido=0.1.9" : null)

modules/nf-core/eido/validate/main.nf

@@ -1,5 +1,5 @@
process EIDO_VALIDATE {
tag '$samplesheet'
tag "$samplesheet"
label 'process_single'
conda (params.enable_conda ? "conda-forge::eido=0.1.9" : null)

modules/nf-core/gunzip/main.nf

@@ -1,6 +1,6 @@
process GUNZIP {
tag "$archive"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "conda-forge::sed=4.7" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/kaiju/kaiju2krona/main.nf

@@ -1,6 +1,6 @@
process KAIJU_KAIJU2KRONA {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/kaiju/kaiju2table/main.nf

@@ -1,6 +1,6 @@
process KAIJU_KAIJU2TABLE {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/krakentools/combinekreports/main.nf

@@ -1,5 +1,5 @@
process KRAKENTOOLS_COMBINEKREPORTS {
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::krakentools=1.2" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/krakentools/kreport2krona/main.nf

@@ -1,6 +1,6 @@
process KRAKENTOOLS_KREPORT2KRONA {
tag "$meta.id"
label 'process_low'
label 'process_single'
// WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions.
conda (params.enable_conda ? "bioconda::krakentools=1.2" : null)

modules/nf-core/krona/ktimporttaxonomy/main.nf

@@ -1,6 +1,6 @@
process KRONA_KTIMPORTTAXONOMY {
tag "${meta.id}"
label 'process_high'
label 'process_single'
// WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions.
conda (params.enable_conda ? "bioconda::krona=2.8" : null)

modules/nf-core/krona/ktimporttext/main.nf

@@ -1,6 +1,6 @@
process KRONA_KTIMPORTTEXT {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::krona=2.8.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/megan/rma2info/main.nf

@@ -1,6 +1,6 @@
process MEGAN_RMA2INFO {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::megan=6.21.7" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/minimap2/index/meta.yml

@@ -12,11 +12,21 @@ tools:
documentation: https://github.com/lh3/minimap2#uguide
licence: ["MIT"]
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: |
Reference database in FASTA format.
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- mmi:
type: file
description: Minimap2 fasta index.

modules/nf-core/minimap2/index/minimap2-index.diff (new file)

@@ -0,0 +1,20 @@
Changes in module 'nf-core/minimap2/index'
--- modules/nf-core/minimap2/index/main.nf
+++ modules/nf-core/minimap2/index/main.nf
@@ -7,11 +7,11 @@
'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }"
input:
- tuple val(meta), path(fasta)
+ path fasta
output:
- tuple val(meta), path("*.mmi"), emit: index
- path "versions.yml" , emit: versions
+ path "*.mmi" , emit: index
+ path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
************************************************************

modules/nf-core/motus/merge/main.nf

@@ -2,7 +2,7 @@ VERSION = '3.0.1'
process MOTUS_MERGE {
tag "$meta.id"
label 'process_low'
label 'process_single'
conda (params.enable_conda ? "bioconda::motus=3.0.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?

modules/nf-core/multiqc/main.nf

@@ -1,14 +1,15 @@
process MULTIQC {
label 'process_medium'
label 'process_single'
conda (params.enable_conda ? 'bioconda::multiqc=1.13a' : null)
conda (params.enable_conda ? 'bioconda::multiqc=1.13' : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/multiqc:1.13a--pyhdfd78af_1' :
'quay.io/biocontainers/multiqc:1.13a--pyhdfd78af_1' }"
'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' :
'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }"
input:
path multiqc_files, stageAs: "?/*"
path(multiqc_config)
path(extra_multiqc_config)
path(multiqc_logo)
output:
@@ -23,11 +24,13 @@ process MULTIQC {
script:
def args = task.ext.args ?: ''
def config = multiqc_config ? "--config $multiqc_config" : ''
def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : ''
"""
multiqc \\
--force \\
$config \\
$args \\
$config \\
$extra_config \\
.
cat <<-END_VERSIONS > versions.yml

modules/nf-core/multiqc/meta.yml

@@ -22,6 +22,10 @@ input:
type: file
description: Optional config yml for MultiQC
pattern: "*.{yml,yaml}"
- extra_multiqc_config:
type: file
description: Second optional config yml for MultiQC. Will override common sections in multiqc_config.
pattern: "*.{yml,yaml}"
- multiqc_logo:
type: file
description: Optional logo file for MultiQC

modules/nf-core/samtools/view/main.nf

@@ -10,31 +10,39 @@ process SAMTOOLS_VIEW {
input:
tuple val(meta), path(input), path(index)
path fasta
path qname
output:
tuple val(meta), path("*.bam") , emit: bam , optional: true
tuple val(meta), path("*.bam"), emit: bam, optional: true
tuple val(meta), path("*.cram"), emit: cram, optional: true
path "versions.yml" , emit: versions
tuple val(meta), path("*.sam"), emit: sam, optional: true
tuple val(meta), path("*.bai"), emit: bai, optional: true
tuple val(meta), path("*.csi"), emit: csi, optional: true
tuple val(meta), path("*.crai"), emit: crai, optional: true
path "versions.yml", emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def args2 = task.ext.args2 ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def reference = fasta ? "--reference ${fasta} -C" : ""
def file_type = input.getExtension()
def reference = fasta ? "--reference ${fasta}" : ""
def readnames = qname ? "--qname-file ${qname}": ""
def file_type = args.contains("--output-fmt sam") ? "sam" :
args.contains("--output-fmt bam") ? "bam" :
args.contains("--output-fmt cram") ? "cram" :
input.getExtension()
if ("$input" == "${prefix}.${file_type}") error "Input and output names are the same, use \"task.ext.prefix\" to disambiguate!"
"""
samtools \\
view \\
--threads ${task.cpus-1} \\
${reference} \\
${readnames} \\
$args \\
$input \\
$args2 \\
> ${prefix}.${file_type}
-o ${prefix}.${file_type} \\
$input
cat <<-END_VERSIONS > versions.yml
"${task.process}":

modules/nf-core/samtools/view/meta.yml

@@ -33,6 +33,10 @@ input:
type: optional file
description: Reference file the CRAM was created with
pattern: "*.{fasta,fa}"
- qname:
type: file
description: Optional file with read names to output only select alignments
pattern: "*.{txt,list}"
output:
- meta:
type: map
@@ -41,12 +45,29 @@ output:
e.g. [ id:'test', single_end:false ]
- bam:
type: file
description: filtered/converted BAM/SAM file
pattern: "*.{bam,sam}"
description: optional filtered/converted BAM file
pattern: "*.{bam}"
- cram:
type: file
description: filtered/converted CRAM file
pattern: "*.cram"
description: optional filtered/converted CRAM file
pattern: "*.{cram}"
- sam:
type: file
description: optional filtered/converted SAM file
pattern: "*.{sam}"
# bai, csi, and crai are created with `--write-index`
- bai:
type: file
description: optional BAM file index
pattern: "*.{bai}"
- csi:
type: file
description: optional tabix BAM file index
pattern: "*.{csi}"
- crai:
type: file
description: optional CRAM file index
pattern: "*.{crai}"
- versions:
type: file
description: File containing software versions
@@ -55,3 +76,4 @@ authors:
- "@drpatelh"
- "@joseespinosa"
- "@FriederikeHanssen"
- "@priyanka-surana"

modules/nf-core/untar/main.nf

@@ -27,7 +27,7 @@ process UNTAR {
## Ensures --strip-components only applied when top level of tar contents is a directory
## If just files or multiple directories, place all in output
if [[ \$(tar -tzf ${archive} | grep "/\$" | wc -l) -eq 1 ]]; then
if [[ \$(tar -tzf ${archive} | grep -o -P "^.*?\\/" | uniq | wc -l) -eq 1 ]]; then
tar \\
-C output --strip-components 1 \\
-xzvf \\

nextflow.config

@@ -21,7 +21,9 @@ params {
// MultiQC options
multiqc_config = null
multiqc_title = null
multiqc_logo = null
max_multiqc_email_size = '25.MB'
multiqc_methods_description = null
// Boilerplate options
outdir = null
@@ -31,6 +33,7 @@ params {
email_on_fail = null
plaintext_email = false
monochrome_logs = false
hook_url = null
help = false
validate_params = true
show_hidden_params = false
@@ -159,7 +162,6 @@ try {
}
profiles {
debug { process.beforeScript = 'echo $HOSTNAME' }
conda {
@@ -280,6 +282,7 @@ manifest {
mainScript = 'main.nf'
nextflowVersion = '!>=21.10.3'
version = '1.0dev'
doi = ''
}
// Load modules.config for DSL2 module specific options

nextflow_schema.json

@@ -586,12 +586,30 @@
"fa_icon": "fas fa-palette",
"hidden": true
},
"hook_url": {
"type": "string",
"description": "Incoming hook URL for messaging service",
"fa_icon": "fas fa-people-group",
"help_text": "Incoming hook URL for messaging service. Currently, only MS Teams is supported.",
"hidden": true
},
"multiqc_config": {
"type": "string",
"description": "Custom config file to supply to MultiQC.",
"fa_icon": "fas fa-cog",
"hidden": true
},
"multiqc_logo": {
"type": "string",
"description": "Custom logo file to supply to MultiQC. File name must also be set in the MultiQC config file",
"fa_icon": "fas fa-image",
"hidden": true
},
"multiqc_methods_description": {
"type": "string",
"description": "Custom MultiQC yaml file containing HTML including a methods description.",
"fa_icon": "fas fa-cog"
},
"tracedir": {
"type": "string",
"description": "Directory to keep pipeline Nextflow logs and reports.",

subworkflows/local/db_check.nf

@@ -3,7 +3,7 @@
//
include { DATABASE_CHECK } from '../../modules/local/database_check'
include { UNTAR } from '../../modules/nf-core/modules/untar/main'
include { UNTAR } from '../../modules/nf-core/untar/main'
workflow DB_CHECK {
take:

subworkflows/local/input_check.nf

@@ -2,8 +2,8 @@
// Check input samplesheet and get read channels
//
include { EIDO_VALIDATE } from '../../modules/nf-core/modules/eido/validate/main'
include { EIDO_CONVERT } from '../../modules/nf-core/modules/eido/convert/main'
include { EIDO_VALIDATE } from '../../modules/nf-core/eido/validate/main'
include { EIDO_CONVERT } from '../../modules/nf-core/eido/convert/main'
workflow INPUT_CHECK {
take:

subworkflows/local/longread_hostremoval.nf

@@ -2,10 +2,10 @@
// Remove host reads via alignment and export off-target reads
//
include { MINIMAP2_INDEX } from '../../modules/nf-core/modules/minimap2/index/main'
include { MINIMAP2_ALIGN } from '../../modules/nf-core/modules/minimap2/align/main'
include { SAMTOOLS_VIEW } from '../../modules/nf-core/modules/samtools/view/main'
include { SAMTOOLS_BAM2FQ } from '../../modules/nf-core/modules/samtools/bam2fq/main'
include { MINIMAP2_INDEX } from '../../modules/nf-core/minimap2/index/main'
include { MINIMAP2_ALIGN } from '../../modules/nf-core/minimap2/align/main'
include { SAMTOOLS_VIEW } from '../../modules/nf-core/samtools/view/main'
include { SAMTOOLS_BAM2FQ } from '../../modules/nf-core/samtools/bam2fq/main'
workflow LONGREAD_HOSTREMOVAL {
take:
@@ -33,7 +33,7 @@ workflow LONGREAD_HOSTREMOVAL {
}
SAMTOOLS_VIEW ( ch_minimap2_mapped , [] )
SAMTOOLS_VIEW ( ch_minimap2_mapped , [], [] )
ch_versions = ch_versions.mix( SAMTOOLS_VIEW.out.versions.first() )
SAMTOOLS_BAM2FQ ( SAMTOOLS_VIEW.out.bam, false )

subworkflows/local/longread_preprocessing.nf

@@ -2,9 +2,9 @@
// Process long raw reads with porechop
//
include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/modules/fastqc/main'
include { PORECHOP } from '../../modules/nf-core/modules/porechop/main'
include { FILTLONG } from '../../modules/nf-core/modules/filtlong/main'
include { FASTQC as FASTQC_PROCESSED } from '../../modules/nf-core/fastqc/main'
include { PORECHOP } from '../../modules/nf-core/porechop/main'
include { FILTLONG } from '../../modules/nf-core/filtlong/main'
workflow LONGREAD_PREPROCESSING {
take:

subworkflows/local/profiling.nf

@@ -2,15 +2,15 @@
// Run profiling
//
include { MALT_RUN } from '../../modules/nf-core/modules/malt/run/main'
include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_TSV } from '../../modules/nf-core/modules/megan/rma2info/main'
include { KRAKEN2_KRAKEN2 } from '../../modules/nf-core/modules/kraken2/kraken2/main'
include { CENTRIFUGE_CENTRIFUGE } from '../../modules/nf-core/modules/centrifuge/centrifuge/main'
include { CENTRIFUGE_KREPORT } from '../../modules/nf-core/modules/centrifuge/kreport/main'
include { METAPHLAN3_METAPHLAN3 } from '../../modules/nf-core/modules/metaphlan3/metaphlan3/main'
include { KAIJU_KAIJU } from '../../modules/nf-core/modules/kaiju/kaiju/main'
include { DIAMOND_BLASTX } from '../../modules/nf-core/modules/diamond/blastx/main'
include { MOTUS_PROFILE } from '../../modules/nf-core/modules/motus/profile/main'
include { MALT_RUN } from '../../modules/nf-core/malt/run/main'
include { MEGAN_RMA2INFO as MEGAN_RMA2INFO_TSV } from '../../modules/nf-core/megan/rma2info/main'
include { KRAKEN2_KRAKEN2 } from '../../modules/nf-core/kraken2/kraken2/main'
include { CENTRIFUGE_CENTRIFUGE } from '../../modules/nf-core/centrifuge/centrifuge/main'
include { CENTRIFUGE_KREPORT } from '../../modules/nf-core/centrifuge/kreport/main'
include { METAPHLAN3_METAPHLAN3 } from '../../modules/nf-core/metaphlan3/metaphlan3/main'
include { KAIJU_KAIJU } from '../../modules/nf-core/kaiju/kaiju/main'
include { DIAMOND_BLASTX } from '../../modules/nf-core/diamond/blastx/main'
include { MOTUS_PROFILE } from '../../modules/nf-core/motus/profile/main'
workflow PROFILING {
take:

subworkflows/local/shortread_adapterremoval.nf

@@ -2,9 +2,9 @@
// Process short raw reads with AdapterRemoval
//
include { ADAPTERREMOVAL as ADAPTERREMOVAL_SINGLE } from '../../modules/nf-core/modules/adapterremoval/main'
include { ADAPTERREMOVAL as ADAPTERREMOVAL_PAIRED } from '../../modules/nf-core/modules/adapterremoval/main'
include { CAT_FASTQ } from '../../modules/nf-core/modules/cat/fastq/main'
include { ADAPTERREMOVAL as ADAPTERREMOVAL_SINGLE } from '../../modules/nf-core/adapterremoval/main'
include { ADAPTERREMOVAL as ADAPTERREMOVAL_PAIRED } from '../../modules/nf-core/adapterremoval/main'
include { CAT_FASTQ } from '../../modules/nf-core/cat/fastq/main'
workflow SHORTREAD_ADAPTERREMOVAL {

subworkflows/local/shortread_complexityfiltering.nf

@@ -2,8 +2,8 @@
// Check input samplesheet and get read channels
//
include { BBMAP_BBDUK } from '../../modules/nf-core/modules/bbmap/bbduk/main'
include { PRINSEQPLUSPLUS } from '../../modules/nf-core/modules/prinseqplusplus/main'
include { BBMAP_BBDUK } from '../../modules/nf-core/bbmap/bbduk/main'
include { PRINSEQPLUSPLUS } from '../../modules/nf-core/prinseqplusplus/main'
workflow SHORTREAD_COMPLEXITYFILTERING {
take:

subworkflows/local/shortread_fastp.nf

@@ -2,8 +2,8 @@
// Process short raw reads with FastP
//
include { FASTP as FASTP_SINGLE } from '../../modules/nf-core/modules/fastp/main'
include { FASTP as FASTP_PAIRED } from '../../modules/nf-core/modules/fastp/main'
include { FASTP as FASTP_SINGLE } from '../../modules/nf-core/fastp/main'
include { FASTP as FASTP_PAIRED } from '../../modules/nf-core/fastp/main'
workflow SHORTREAD_FASTP {
take:

subworkflows/local/shortread_hostremoval.nf

@@ -2,8 +2,8 @@
// Remove host reads via alignment and export off-target reads
//
include { BOWTIE2_BUILD } from '../../modules/nf-core/modules/bowtie2/build/main'
include { BOWTIE2_ALIGN } from '../../modules/nf-core/modules/bowtie2/align/main'
include { BOWTIE2_BUILD } from '../../modules/nf-core/bowtie2/build/main'
include { BOWTIE2_ALIGN } from '../../modules/nf-core/bowtie2/align/main'
workflow SHORTREAD_HOSTREMOVAL {
take:

Some files were not shown because too many files have changed in this diff.