Merge branch 'nf-core:master' into master

Authored by James A. Fellows Yates on 2022-04-02 22:12:24 +02:00; committed by GitHub
commit 5e58d3bcd5
46 changed files with 869 additions and 90 deletions


@@ -49,6 +49,8 @@ process CAT_CAT {
    """
    stub:
+   def file_list = files_in.collect { it.toString() }
+   prefix = task.ext.prefix ?: "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}"
    """
    touch $prefix
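The added stub lines name the placeholder output after the extension of the first input file. A minimal, hypothetical Groovy sketch of what that expression evaluates to (illustration only, not part of the commit; the file names and meta.id are assumed):

    // assumed example inputs
    def file_list = ['sample_1.fasta', 'sample_2.fasta']
    def meta      = [id: 'test']
    def prefix    = "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}"
    assert prefix == 'test.fasta'   // extension of the first file appended to meta.id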


@@ -4,7 +4,7 @@ process CNVPYTOR_CALLCNVS {
    conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2':
+       'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2':
        'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }"
    input:

@@ -30,4 +30,15 @@ process CNVPYTOR_CALLCNVS {
        cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
    END_VERSIONS
    """
+   stub:
+   def prefix = task.ext.prefix ?: "${meta.id}"
+   """
+   touch ${prefix}.tsv
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+       cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
+   END_VERSIONS
+   """
}


@@ -4,7 +4,7 @@ process CNVPYTOR_HISTOGRAM {
    conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2':
+       'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2':
        'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }"
    input:

@@ -29,4 +29,14 @@ process CNVPYTOR_HISTOGRAM {
        cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
    END_VERSIONS
    """
+   stub:
+   """
+   touch test.pytor
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+       cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
+   END_VERSIONS
+   """
}


@@ -4,7 +4,7 @@ process CNVPYTOR_IMPORTREADDEPTH {
    conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2':
+       'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2':
        'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }"
    input:

@@ -35,4 +35,15 @@ process CNVPYTOR_IMPORTREADDEPTH {
        cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
    END_VERSIONS
    """
+   stub:
+   def prefix = task.ext.prefix ?: "${meta.id}"
+   """
+   touch ${prefix}.pytor
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+       cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
+   END_VERSIONS
+   """
}


@@ -4,7 +4,7 @@ process CNVPYTOR_PARTITION {
    conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2':
+       'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2':
        'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }"
    input:

@@ -18,7 +18,7 @@ process CNVPYTOR_PARTITION {
    task.ext.when == null || task.ext.when
    script:
-   def args = task.ext.args ?: '1000'
+   def args = task.ext.args ?: ''
    """
    cnvpytor \\
        -root $pytor \\

@@ -29,4 +29,14 @@ process CNVPYTOR_PARTITION {
        cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
    END_VERSIONS
    """
+   stub:
+   """
+   touch test.pytor
+   cat <<-END_VERSIONS > versions.yml
+   "${task.process}":
+       cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' ))
+   END_VERSIONS
+   """
}
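With the hard-coded '1000' default removed from ext.args, the partition bin sizes would now be supplied by the calling pipeline. A hypothetical configuration sketch (the bin sizes shown are illustrative, not from the commit):

    process {
        withName: CNVPYTOR_PARTITION {
            ext.args = '10000 100000'   // assumed example bin sizes passed through to the -partition step
        }
    }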


@@ -2,27 +2,28 @@ process DASTOOL_DASTOOL {
    tag "$meta.id"
    label 'process_medium'
-   conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null)
+   conda (params.enable_conda ? "bioconda::das_tool=1.1.4" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' :
-       'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }"
+       'https://depot.galaxyproject.org/singularity/das_tool:1.1.4--r41hdfd78af_1' :
+       'quay.io/biocontainers/das_tool:1.1.4--r41hdfd78af_1' }"
    input:
    tuple val(meta), path(contigs), path(bins)
    path(proteins)
    path(db_directory)
-   val(search_engine)
    output:
    tuple val(meta), path("*.log") , emit: log
-   tuple val(meta), path("*_summary.txt") , emit: summary
-   tuple val(meta), path("*_DASTool_scaffolds2bin.txt") , emit: scaffolds2bin
+   tuple val(meta), path("*_summary.tsv") , emit: summary
+   tuple val(meta), path("*_DASTool_contig2bin.tsv") , emit: contig2bin
    tuple val(meta), path("*.eval") , optional: true, emit: eval
    tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins
    tuple val(meta), path("*.pdf") , optional: true, emit: pdfs
-   tuple val(meta), path("*.proteins.faa") , optional: true, emit: fasta_proteins
+   tuple val(meta), path("*.candidates.faa") , optional: true, emit: fasta_proteins
+   tuple val(meta), path("*.faa") , optional: true, emit: candidates_faa
    tuple val(meta), path("*.archaea.scg") , optional: true, emit: fasta_archaea_scg
    tuple val(meta), path("*.bacteria.scg") , optional: true, emit: fasta_bacteria_scg
+   tuple val(meta), path("*.b6") , optional: true, emit: b6
    tuple val(meta), path("*.seqlength") , optional: true, emit: seqlength
    path "versions.yml" , emit: versions

@@ -33,17 +34,12 @@ process DASTOOL_DASTOOL {
    def args = task.ext.args ?: ''
    def prefix = task.ext.prefix ?: "${meta.id}"
    def bin_list = bins instanceof List ? bins.join(",") : "$bins"
-   def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond"
    def db_dir = db_directory ? "--db_directory $db_directory" : ""
    def clean_contigs = contigs.toString() - ".gz"
    def decompress_contigs = contigs.toString() == clean_contigs ? "" : "gunzip -q -f $contigs"
-   def decompress_proteins = proteins ? "gunzip -f $proteins" : ""
    def clean_proteins = proteins ? proteins.toString() - ".gz" : ""
-   def proteins_pred = proteins ? "--proteins $clean_proteins" : ""
+   def decompress_proteins = proteins ? "gunzip -f $proteins" : ""
+   def proteins_pred = proteins ? "-p $clean_proteins" : ""
-   if (! search_engine) {
-       log.info('[DAS_Tool] Default search engine (USEARCH) is proprietary software and not available in bioconda. Using DIAMOND as alternative.')
-   }
    """
    $decompress_proteins

@@ -53,15 +49,14 @@ process DASTOOL_DASTOOL {
        $args \\
        $proteins_pred \\
        $db_dir \\
-       $engine \\
        -t $task.cpus \\
-       --bins $bin_list \\
+       -i $bin_list \\
        -c $clean_contigs \\
        -o $prefix
    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
-       dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' )
+       dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool //' )
    END_VERSIONS
    """
}
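Since the dedicated search_engine input channel is removed, a pipeline that previously set the engine that way would now pass it through the module's ext.args. A hypothetical configuration sketch (not part of the commit):

    process {
        withName: DASTOOL_DASTOOL {
            ext.args = '--search_engine diamond'   // assumed example; other extra DAS_Tool options go here too
        }
    }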


@@ -34,8 +34,8 @@ input:
      pattern: "*.{fa.gz,fas.gz,fasta.gz}"
  - bins:
      type: file
-     description: "Scaffolds2bin tabular file generated with dastool/scaffolds2bin"
-     pattern: "*.scaffolds2bin.tsv"
+     description: "FastaToContig2Bin tabular file generated with dastool/fastatocontig2bin"
+     pattern: "*.tsv"
  - proteins:
      type: file
      description: Predicted proteins in prodigal fasta format (>scaffoldID_geneNo)

@@ -43,9 +43,6 @@ input:
  - db_directory:
      type: file
      description: (optional) Directory of single copy gene database.
-  - search_engine:
-      type: val
-      description: Engine used for single copy gene identification. USEARCH is not supported due to it being proprietary [blast/diamond]
output:
  - meta:

@@ -65,14 +62,17 @@ output:
      type: file
      description: Summary of output bins including quality and completeness estimates
      pattern: "*summary.txt"
-  - scaffolds2bin:
+  - contig2bin:
      type: file
      description: Scaffolds to bin file of output bins
-     pattern: "*.scaffolds2bin.txt"
+     pattern: "*.contig2bin.txt"
  - eval:
      type: file
      description: Quality and completeness estimates of input bin sets
      pattern: "*.eval"
+  - bins:
+      description: Final refined bins in fasta format
+      pattern: "*.fa"
  - pdfs:
      type: file
      description: Plots showing the amount of high quality bins and score distribution of bins per method

@@ -89,6 +89,10 @@ output:
      type: file
      description: Results of bacterial single-copy-gene prediction
      pattern: "*.bacteria.scg"
+  - b6:
+      type: file
+      description: Results in b6 format
+      pattern: "*.b6"
  - seqlength:
      type: file
      description: Summary of contig lengths


@@ -0,0 +1,41 @@
process DASTOOL_FASTATOCONTIG2BIN {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::das_tool=1.1.4" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/das_tool:1.1.4--r41hdfd78af_1' :
'quay.io/biocontainers/das_tool:1.1.4--r41hdfd78af_1' }"
input:
tuple val(meta), path(fasta)
val(extension)
output:
tuple val(meta), path("*.tsv"), emit: fastatocontig2bin
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def file_extension = extension ? extension : "fasta"
def clean_fasta = fasta.toString() - ".gz"
def decompress_fasta = fasta.toString() == clean_fasta ? "" : "gunzip -q -f $fasta"
"""
$decompress_fasta
Fasta_to_Contig2Bin.sh \\
$args \\
-i . \\
-e $file_extension \\
> ${prefix}.tsv
cat <<-END_VERSIONS > versions.yml
"${task.process}":
dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool //' )
END_VERSIONS
"""
}


@@ -0,0 +1,56 @@
name: dastool_fastatocontig2bin
description: Helper script to convert a set of bins in fasta format to tabular scaffolds2bin format
keywords:
- binning
- das tool
- table
- de novo
- bins
- contigs
- assembly
- das_tool
tools:
- dastool:
description: |
DAS Tool is an automated method that integrates the results
of a flexible number of binning algorithms to calculate an optimized, non-redundant
set of bins from a single assembly.
homepage: https://github.com/cmks/DAS_Tool
documentation: https://github.com/cmks/DAS_Tool
tool_dev_url: https://github.com/cmks/DAS_Tool
doi: "10.1038/s41564-018-0171-1"
licence: ["BSD"]
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: Fasta or list of fasta files, recommended to be gathered via .collect() of bins
pattern: "*.{fa,fa.gz,fas,fas.gz,fna,fna.gz,fasta,fasta.gz}"
- extension:
type: val
description: Fasta file extension (fa | fas | fasta | ...), without .gz suffix, if gzipped input.
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- fastatocontig2bin:
type: file
description: tabular contig2bin file for DAS tool input
pattern: "*.tsv"
authors:
- "@maxibor"
- "@jfy133"


@@ -46,7 +46,7 @@ process DEEPVARIANT {
    """
    stub:
-   def prefix = task.ext.prefix ?: "${meta.id}"
+   prefix = task.ext.prefix ?: "${meta.id}"
    """
    touch ${prefix}.vcf.gz
    touch ${prefix}.g.vcf.gz


@@ -39,9 +39,8 @@ process GATK4_CREATESEQUENCEDICTIONARY {
    """
    stub:
-   def prefix = task.ext.prefix ?: "${meta.id}"
    """
-   touch ${prefix}.dict
+   touch test.dict
    cat <<-END_VERSIONS > versions.yml
    "${task.process}":


@@ -0,0 +1,40 @@
process GSTAMA_POLYACLEANUP {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0':
'quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0' }"
input:
tuple val(meta), path(fasta)
output:
tuple val(meta), path("*_tama.fa.gz") , emit: fasta
tuple val(meta), path("*_tama_polya_flnc_report.txt.gz"), emit: report
tuple val(meta), path("*_tama_tails.fa.gz") , emit: tails
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
if( "$fasta" == "${prefix}.fasta" | "$fasta" == "${prefix}.fa" ) error "Input and output names are the same, set prefix in module configuration"
"""
tama_flnc_polya_cleanup.py \\
-f $fasta \\
-p ${prefix} \\
$args
gzip ${prefix}.fa
gzip ${prefix}_polya_flnc_report.txt
gzip ${prefix}_tails.fa
cat <<-END_VERSIONS > versions.yml
"${task.process}":
gstama: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' )
END_VERSIONS
"""
}


@@ -0,0 +1,55 @@
name: gstama_polyacleanup
description: Helper script, remove remaining polyA sequences from Full Length Non Chimeric reads (Pacbio isoseq3)
keywords:
- gstama
- gstama/polyacleanup
- long-read
- isoseq
- tama
- transcriptome
- annotation
tools:
- gstama:
description: Gene-Switch Transcriptome Annotation by Modular Algorithms
homepage: https://github.com/sguizard/gs-tama
documentation: https://github.com/GenomeRIK/tama/wiki
tool_dev_url: https://github.com/sguizard/gs-tama
doi: "https://doi.org/10.1186/s12864-020-07123-7"
licence: ["GPL v3 License"]
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: Full Length Non Chimeric reads in fasta format
pattern: "*.{fa,fasta}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- fasta:
type: file
description: The Full Length Non Chimeric reads cleaned from remaining polyA tails. The sequences are in FASTA format compressed with gzip.
pattern: "*_tama.fa.gz"
- report:
type: file
description: A text file describing the number of polyA tails removed and their length. Compressed with gzip.
pattern: "*_tama_polya_flnc_report.txt.gz"
- tails:
type: file
description: A gzip compressed FASTA file of trimmed polyA tails.
pattern: "*_tama_tails.fa.gz"
authors:
- "@sguizard"


@@ -8,9 +8,10 @@ process MANTA_GERMLINE {
        'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }"
    input:
-   tuple val(meta), path(input), path(index), path(target_bed), path(target_bed_tbi)
+   tuple val(meta), path(input), path(index)
    path fasta
    path fasta_fai
+   tuple path(target_bed), path(target_bed_tbi)
    output:


@@ -26,7 +26,7 @@ process PHANTOMPEAKQUALTOOLS {
    def prefix = task.ext.prefix ?: "${meta.id}"
    """
    RUN_SPP=`which run_spp.R`
-   Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus
+   Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus
    cat <<-END_VERSIONS > versions.yml
    "${task.process}":


@@ -0,0 +1,60 @@
name: "phantompeakqualtools"
description:
keywords:
- "ChIP-Seq"
- "QC"
- "phantom peaks"
tools:
- "phantompeakqualtools":
description: |
"This package computes informative enrichment and quality measures
for ChIP-seq/DNase-seq/FAIRE-seq/MNase-seq data. It can also be used
to obtain robust estimates of the predominant fragment length or
characteristic tag shift values in these assays."
homepage: "None"
documentation: "https://github.com/kundajelab/phantompeakqualtools"
tool_dev_url: "https://github.com/kundajelab/phantompeakqualtools"
doi: "https://doi.org/10.1101/gr.136184.111"
licence: "['BSD-3-clause']"
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- bam:
type: file
description: BAM/CRAM/SAM file
pattern: "*.{bam,cram,sam}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- spp:
type: file
description: |
A ChIP-Seq Processing Pipeline file containing
peakshift/phantomPeak results
pattern: "*.{out}"
- pdf:
type: file
description: A pdf containing saved cross-correlation plots
pattern: "*.{pdf}"
- rdata:
type: file
description: Rdata file containing the R session
pattern: "*.{Rdata}"
authors:
- "@drpatelh"
- "@Emiller88"
- "@JoseEspinosa"


@@ -2,10 +2,10 @@ process PYDAMAGE_ANALYZE {
    tag "$meta.id"
    label 'process_medium'
-   conda (params.enable_conda ? "bioconda::pydamage=0.62" : null)
+   conda (params.enable_conda ? "bioconda::pydamage=0.70" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' :
-       'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }"
+       'https://depot.galaxyproject.org/singularity/pydamage:0.70--pyhdfd78af_0' :
+       'quay.io/biocontainers/pydamage:0.70--pyhdfd78af_0' }"
    input:
    tuple val(meta), path(bam), path(bai)


@@ -2,10 +2,10 @@ process PYDAMAGE_FILTER {
    tag "$meta.id"
    label 'process_low'
-   conda (params.enable_conda ? "bioconda::pydamage=0.62" : null)
+   conda (params.enable_conda ? "bioconda::pydamage=0.70" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' :
-       'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }"
+       'https://depot.galaxyproject.org/singularity/pydamage:0.70--pyhdfd78af_0' :
+       'quay.io/biocontainers/pydamage:0.70--pyhdfd78af_0' }"
    input:
    tuple val(meta), path(csv)


@@ -0,0 +1,34 @@
process SEQKIT_STATS {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::seqkit=2.2.0" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/seqkit:2.2.0--h9ee0642_0':
'quay.io/biocontainers/seqkit:2.2.0--h9ee0642_0' }"
input:
tuple val(meta), path(reads)
output:
tuple val(meta), path("*.tsv"), emit: stats
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: '--all'
def prefix = task.ext.prefix ?: "${meta.id}"
"""
seqkit stats \\
--tabular \\
$args \\
$reads > '${prefix}.tsv'
cat <<-END_VERSIONS > versions.yml
"${task.process}":
seqkit: \$( seqkit version | sed 's/seqkit v//' )
END_VERSIONS
"""
}


@@ -0,0 +1,44 @@
name: "seqkit_stats"
description: simple statistics of FASTA/Q files
keywords:
- seqkit
- stats
tools:
- "seqkit":
description: Cross-platform and ultrafast toolkit for FASTA/Q file manipulation, written by Wei Shen.
homepage: https://bioinf.shenwei.me/seqkit/usage/
documentation: https://bioinf.shenwei.me/seqkit/usage/
tool_dev_url: https://github.com/shenwei356/seqkit/
doi: "10.1371/journal.pone.0163962"
licence: ["MIT"]
input:
- meta:
type: map
description: >
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- reads:
type: file
description: >
Either FASTA or FASTQ files.
pattern: "*.{fa,fna,faa,fasta,fq,fastq}[.gz]"
output:
- meta:
type: map
description: >
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- stats:
type: file
description: >
Tab-separated output file with basic sequence statistics.
pattern: "*.tsv"
authors:
- "@Midnighter"


@@ -2,10 +2,10 @@ process SVDB_MERGE {
    tag "$meta.id"
    label 'process_medium'
-   conda (params.enable_conda ? "bioconda::svdb=2.5.2" : null)
+   conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0':
-       'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }"
+       'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0':
+       'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }"
    input:
    tuple val(meta), path(vcfs)


@@ -2,10 +2,10 @@ process SVDB_QUERY {
    tag "$meta.id"
    label 'process_medium'
-   conda (params.enable_conda ? "bioconda::svdb=2.5.2" : null)
+   conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-       'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0':
-       'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }"
+       'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0':
+       'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }"
    input:
    tuple val(meta), path(vcf)


@@ -40,8 +40,8 @@ process TIDDIT_COV {
    stub:
    def prefix = task.ext.prefix ?: "${meta.id}"
    """
-   touch $prefix.wig
-   touch $prefix.tab
+   touch ${prefix}.wig
+   touch ${prefix}.tab
    cat <<-END_VERSIONS > versions.yml
    "${task.process}":


@@ -42,9 +42,9 @@ process TIDDIT_SV {
    stub:
    def prefix = task.ext.prefix ?: "${meta.id}"
    """
-   touch $prefix.vcf
-   touch $prefix.ploidy.tab
-   touch $prefix.signals.tab
+   touch ${prefix}.vcf
+   touch ${prefix}.ploidy.tab
+   touch ${prefix}.signals.tab
    cat <<-END_VERSIONS > versions.yml
    "${task.process}":


@@ -487,6 +487,10 @@ dastool/dastool:
  - modules/dastool/dastool/**
  - tests/modules/dastool/dastool/**
+ dastool/fastatocontig2bin:
+   - modules/dastool/fastatocontig2bin/**
+   - tests/modules/dastool/fastatocontig2bin/**
dastool/scaffolds2bin:
  - modules/dastool/scaffolds2bin/**
  - tests/modules/dastool/scaffolds2bin/**

@@ -811,6 +815,10 @@ gstama/merge:
  - modules/gstama/merge/**
  - tests/modules/gstama/merge/**
+ gstama/polyacleanup:
+   - modules/gstama/polyacleanup/**
+   - tests/modules/gstama/polyacleanup/**
gtdbtk/classifywf:
  - modules/gtdbtk/classifywf/**
  - tests/modules/gtdbtk/classifywf/**

@@ -1299,6 +1307,10 @@ peddy:
  - modules/peddy/**
  - tests/modules/peddy/**
+ phantompeakqualtools:
+   - modules/phantompeakqualtools/**
+   - tests/modules/phantompeakqualtools/**
phyloflash:
  - modules/phyloflash/**
  - tests/modules/phyloflash/**

@@ -1591,6 +1603,10 @@ seqkit/split2:
  - modules/seqkit/split2/**
  - tests/modules/seqkit/split2/**
+ seqkit/stats:
+   - modules/seqkit/stats/**
+   - tests/modules/seqkit/stats/**
seqsero2:
  - modules/seqsero2/**
  - tests/modules/seqsero2/**


@@ -7,6 +7,14 @@
    - path: output/cat/test.fasta
      md5sum: f44b33a0e441ad58b2d3700270e2dbe2
+ - name: cat unzipped unzipped stub
+   command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run
+   tags:
+     - cat
+     - cat/cat
+   files:
+     - path: output/cat/test.fasta
- name: cat zipped zipped
  command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config
  tags:

@@ -15,6 +23,14 @@
  files:
    - path: output/cat/test.gz
+ - name: cat zipped zipped stub
+   command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run
+   tags:
+     - cat
+     - cat/cat
+   files:
+     - path: output/cat/test.gz
- name: cat zipped unzipped
  command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config
  tags:

@@ -24,6 +40,14 @@
    - path: output/cat/cat.txt
      md5sum: c439d3b60e7bc03e8802a451a0d9a5d9
+ - name: cat zipped unzipped stub
+   command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run
+   tags:
+     - cat
+     - cat/cat
+   files:
+     - path: output/cat/cat.txt
- name: cat unzipped zipped
  command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config
  tags:

@@ -32,6 +56,14 @@
  files:
    - path: output/cat/cat.txt.gz
+ - name: cat unzipped zipped stub
+   command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run
+   tags:
+     - cat
+     - cat/cat
+   files:
+     - path: output/cat/cat.txt.gz
- name: cat one file unzipped zipped
  command: nextflow run ./tests/modules/cat/cat -entry test_cat_one_file_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config
  tags:

@@ -39,3 +71,11 @@
    - cat/cat
  files:
    - path: output/cat/cat.txt.gz
+ - name: cat one file unzipped zipped stub
+   command: nextflow run ./tests/modules/cat/cat -entry test_cat_one_file_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run
+   tags:
+     - cat
+     - cat/cat
+   files:
+     - path: output/cat/cat.txt.gz


@@ -4,7 +4,17 @@
    - cnvpytor
    - cnvpytor/callcnvs
  files:
-   - path: output/cnvpytor/calls.10000.tsv
+   - path: output/cnvpytor/test.tsv
      md5sum: d41d8cd98f00b204e9800998ecf8427e
    - path: output/cnvpytor/versions.yml
-     md5sum: 5fe6ca3ef5c40f9dbf487f28db237821
+     md5sum: 0bea08a253fcb2ff0ff79b99df77b9fa
+ - name: cnvpytor callcnvs test_cnvpytor_callcnvs stub
+   command: nextflow run tests/modules/cnvpytor/callcnvs -entry test_cnvpytor_callcnvs -c tests/config/nextflow.config -stub-run
+   tags:
+     - cnvpytor
+     - cnvpytor/callcnvs
+   files:
+     - path: output/cnvpytor/test.tsv
+     - path: output/cnvpytor/versions.yml
+       md5sum: 0bea08a253fcb2ff0ff79b99df77b9fa


@@ -7,4 +7,14 @@
    - path: output/cnvpytor/test.pytor
      md5sum: aa03a8fa15b39f77816705a48e10312a
    - path: output/cnvpytor/versions.yml
-     md5sum: 9a4b176afd5f1a3edeb37eeb301cf464
+     md5sum: 0f4d75c4f3a3eb26c22616d12b0b78b2
+ - name: cnvpytor histogram test_cnvpytor_histogram stub
+   command: nextflow run tests/modules/cnvpytor/histogram -entry test_cnvpytor_histogram -c tests/config/nextflow.config -stub-run
+   tags:
+     - cnvpytor
+     - cnvpytor/histogram
+   files:
+     - path: output/cnvpytor/test.pytor
+     - path: output/cnvpytor/versions.yml
+       md5sum: 0f4d75c4f3a3eb26c22616d12b0b78b2


@@ -8,5 +8,5 @@ process {
}
params {
-   cnvpytor_chr = '' // specifies chromosome name(s) the same way as they are described in the sam/bam/cram header e.g. '1 2' or 'chr1 chr2'.
+   cnvpytor_chr = null // specifies chromosome name(s) the same way as they are described in the sam/bam/cram header e.g. '1 2' or 'chr1 chr2'.
}


@@ -0,0 +1,39 @@
- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth
command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth -c tests/config/nextflow.config
tags:
- cnvpytor
- cnvpytor/importreaddepth
files:
- path: output/cnvpytor/test.pytor
- path: output/cnvpytor/versions.yml
md5sum: 5834495324c08a37f3fd73ccdd881dc8
- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth stub
command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth -c tests/config/nextflow.config -stub-run
tags:
- cnvpytor
- cnvpytor/importreaddepth
files:
- path: output/cnvpytor/test.pytor
- path: output/cnvpytor/versions.yml
md5sum: 5834495324c08a37f3fd73ccdd881dc8
- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth_cram
command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth_cram -c tests/config/nextflow.config
tags:
- cnvpytor
- cnvpytor/importreaddepth
files:
- path: output/cnvpytor/test.pytor
- path: output/cnvpytor/versions.yml
md5sum: dfa0afb0982d985b96d1633f71ebb82a
- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth_cram stub
command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth_cram -c tests/config/nextflow.config -stub-run
tags:
- cnvpytor
- cnvpytor/importreaddepth
files:
- path: output/cnvpytor/test.pytor
- path: output/cnvpytor/versions.yml
md5sum: dfa0afb0982d985b96d1633f71ebb82a


@@ -7,4 +7,14 @@
    - path: output/cnvpytor/test.pytor
      md5sum: aa03a8fa15b39f77816705a48e10312a
    - path: output/cnvpytor/versions.yml
-     md5sum: 8a04506554c58cd170cc050fd9904c6f
+     md5sum: 7fd6ec952a316463bcd324f176b46b64
+ - name: cnvpytor partition test_cnvpytor_partition stub
+   command: nextflow run tests/modules/cnvpytor/partition -entry test_cnvpytor_partition -c tests/config/nextflow.config -stub-run
+   tags:
+     - cnvpytor
+     - cnvpytor/partition
+   files:
+     - path: output/cnvpytor/test.pytor
+     - path: output/cnvpytor/versions.yml
+       md5sum: 7fd6ec952a316463bcd324f176b46b64


@@ -3,7 +3,7 @@ nextflow.enable.dsl = 2
include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf'
include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf'
-include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf'
+include { DASTOOL_FASTATOCONTIG2BIN } from '../../../../modules/dastool/fastatocontig2bin/main.nf'
include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf'
workflow test_dastool_dastool {

@@ -21,13 +21,13 @@ workflow test_dastool_dastool {
    METABAT2_METABAT2 ( input_metabat2 )
-   DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa")
+   DASTOOL_FASTATOCONTIG2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa")
    Channel.of([ [ id:'test', single_end:false ], // meta map
        file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)])
-       .join(DASTOOL_SCAFFOLDS2BIN.out.scaffolds2bin)
+       .join( DASTOOL_FASTATOCONTIG2BIN.out.fastatocontig2bin )
        .set {input_dastool}
-   DASTOOL_DASTOOL ( input_dastool, [], [], [] )
+   DASTOOL_DASTOOL ( input_dastool, [], [] )
}


@@ -1,29 +1,28 @@
- name: dastool dastool test_dastool_dastool
-  command: nextflow run ./tests/modules/dastool/dastool -entry test_dastool_dastool -c ./tests/config/nextflow.config -c ./tests/modules/dastool/dastool/nextflow.config
+  command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config
  tags:
-   - dastool
    - dastool/dastool
+   - dastool
  files:
    - path: output/dastool/test.seqlength
      md5sum: b815a5811008c36808a59b1d0dcfab24
    - path: output/dastool/test.tsv
      md5sum: 6e46c0be14dded7cb13af38f54feea47
    - path: output/dastool/test_DASTool.log
-     contains:
-       - "DAS Tool run on"
-   - path: output/dastool/test_DASTool_scaffolds2bin.txt
+   - path: output/dastool/test_DASTool_contig2bin.tsv
      md5sum: 6e46c0be14dded7cb13af38f54feea47
-   - path: output/dastool/test_DASTool_summary.txt
-     md5sum: a3efa8717b30dfada78dc5ae9a3dc396
+   - path: output/dastool/test_DASTool_summary.tsv
+     md5sum: ab9dd3709a59a69bc66030b9e0ff3d5b
+   - path: output/dastool/test_proteins.faa
+   - path: output/dastool/test_proteins.faa.all.b6
+     md5sum: 39c11237ef22ac73109aaac267e185d0
    - path: output/dastool/test_proteins.faa.archaea.scg
      md5sum: e79d82eecee25821d1658ea4f082601d
    - path: output/dastool/test_proteins.faa.bacteria.scg
      md5sum: 8132cfb17cf398d41c036ead55c96ffe
-   - path: output/dastool/test_test.tsv.eval
-     md5sum: a3efa8717b30dfada78dc5ae9a3dc396
-   - path: output/metabat2/bins/test.1.fa.gz
-     md5sum: 2b297bf557cc3831b800348859331268
-   - path: output/metabat2/test.tsv.gz
-     md5sum: 619338fa5019e361d5545ce385a6961f
-   - path: output/metabat2/test.txt.gz
-     md5sum: 745a0446af6ef68b930975e9ce5a95d6
+   - path: output/dastool/test_proteins.faa.findSCG.b6
+     md5sum: 48e90e12cd6c88d00608777dbc48a82a
+   - path: output/dastool/test_proteins.faa.scg.candidates.faa
+     md5sum: d94b7bed0f8aa9cf2824d72c548c537c
+   - path: output/dastool/versions.yml
+     md5sum: 004e04c6a38652df2e0c59c44e29c9de


@@ -0,0 +1,48 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GUNZIP } from '../../../../modules/gunzip/main.nf'
include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf'
include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf'
include { DASTOOL_FASTATOCONTIG2BIN } from '../../../../modules/dastool/fastatocontig2bin/main.nf'
workflow test_dastool_fastatocontig2bin {
input_depth = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true),
file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ]
METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth )
Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)
.map { it -> [[ id:'test', single_end:false ], it] }
.join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth)
.set { input_metabat2 }
METABAT2_METABAT2 ( input_metabat2 )
DASTOOL_FASTATOCONTIG2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa")
}
workflow test_dastool_fastatocontig2bin_ungzipped {
input_depth = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true),
file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ]
METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth )
Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)
.map { it -> [[ id:'test', single_end:false ], it] }
.join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth)
.set { input_metabat2 }
METABAT2_METABAT2 ( input_metabat2 )
// TODO test unzipped input files
ch_input_2_fastatocontig2bin = GUNZIP( METABAT2_METABAT2.out.fasta ).gunzip
DASTOOL_FASTATOCONTIG2BIN ( ch_input_2_fastatocontig2bin, "fa")
}


@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}
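For reference, the publishDir closure above derives the output directory from the first token of the process name. A small, hypothetical Groovy illustration (the process name shown is an assumed example, not from the commit):

    // assumed fully qualified name of a task run under this config
    def name = 'TEST_DASTOOL_FASTATOCONTIG2BIN:DASTOOL_FASTATOCONTIG2BIN'
    def dir  = name.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
    assert dir == 'dastool'   // matches the output/dastool/ paths checked in test.yml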


@@ -0,0 +1,20 @@
- name: dastool fastatocontig2bin test_dastool_fastatocontig2bin
command: nextflow run tests/modules/dastool/fastatocontig2bin -entry test_dastool_fastatocontig2bin -c tests/config/nextflow.config
tags:
- dastool
- dastool/fastatocontig2bin
files:
- path: output/dastool/test.tsv
md5sum: 6e46c0be14dded7cb13af38f54feea47
- path: output/dastool/versions.yml
md5sum: ff4b6f14bee4548bf09b5e602c306595
- name: dastool fastatocontig2bin test_dastool_fastatocontig2bin_ungzipped
command: nextflow run tests/modules/dastool/fastatocontig2bin -entry test_dastool_fastatocontig2bin_ungzipped -c tests/config/nextflow.config
tags:
- dastool
- dastool/fastatocontig2bin
files:
- path: output/dastool/test.tsv
md5sum: 6e46c0be14dded7cb13af38f54feea47
- path: output/dastool/versions.yml


@@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GSTAMA_POLYACLEANUP } from '../../../../modules/gstama/polyacleanup/main.nf'
workflow test_gstama_polyacleanup {
input = [
[ id:'test' ], // meta map
file(params.test_data['homo_sapiens']['genome']['transcriptome_fasta'], checkIfExists: true)
]
GSTAMA_POLYACLEANUP ( input )
}


@@ -0,0 +1,6 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
ext.prefix = { "${meta.id}_tama" }
}


@@ -0,0 +1,14 @@
- name: gstama polyacleanup test_gstama_polyacleanup
command: nextflow run tests/modules/gstama/polyacleanup -entry test_gstama_polyacleanup -c tests/config/nextflow.config
tags:
- gstama
- gstama/polyacleanup
files:
- path: output/gstama/test_tama.fa.gz
md5sum: 9c768387478e5f966a42c369c0270b09
- path: output/gstama/test_tama_polya_flnc_report.txt.gz
md5sum: fe3606979ed11538aacd83159f4cff03
- path: output/gstama/test_tama_tails.fa.gz
md5sum: ba21256c0afe0bda71b3ee66b4c761bf
- path: output/gstama/versions.yml
md5sum: 07ebb812ae13a350d955fab7600b2542


@@ -7,28 +7,30 @@ include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf'
workflow test_manta_germline {
    input = [
        [ id:'test'], // meta map
-       file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true),
-       [],[]
+       [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)],
+       [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)]
    ]
    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+   bed = [[],[]]
-   MANTA_GERMLINE ( input, fasta, fai )
+   MANTA_GERMLINE ( input, fasta, fai, bed )
}
workflow test_manta_germline_target_bed {
    input = [
        [ id:'test'], // meta map
-       file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)
+       [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)],
+       [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)]
    ]
    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+   bed = [
+       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true),
+       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true),
+   ]
-   MANTA_GERMLINE ( input, fasta, fai )
+   MANTA_GERMLINE ( input, fasta, fai, bed )
}
workflow test_manta_germline_target_bed_jointcalling {

@@ -37,12 +39,14 @@ workflow test_manta_germline_target_bed_jointcalling {
        [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true)],
        [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),],
-       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true),
-       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)
+       file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),]
    ]
    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+   bed = [
+       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true),
+       file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true),
+   ]
-   MANTA_GERMLINE ( input, fasta, fai )
+   MANTA_GERMLINE ( input, fasta, fai, bed )
}


@@ -0,0 +1,25 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { PHANTOMPEAKQUALTOOLS } from '../../../modules/phantompeakqualtools/main.nf'
workflow test_phantompeakqualtools_single_end {
input = [
[ id:'test', single_end:true ], // meta map
file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
]
PHANTOMPEAKQUALTOOLS ( input )
}
workflow test_phantompeakqualtools_paired_end {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
]
PHANTOMPEAKQUALTOOLS ( input )
}


@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}


@@ -0,0 +1,23 @@
- name: phantompeakqualtools test_phantompeakqualtools_single_end
command: nextflow run tests/modules/phantompeakqualtools -entry test_phantompeakqualtools_single_end -c tests/config/nextflow.config
tags:
- phantompeakqualtools
files:
- path: output/phantompeakqualtools/test.spp.Rdata
- path: output/phantompeakqualtools/test.spp.out
md5sum: b01d976506b6fe45b66c821b1e8a1d15
- path: output/phantompeakqualtools/test.spp.pdf
- path: output/phantompeakqualtools/versions.yml
md5sum: 6c2ede1aac4c574e3c72fbe09f15c03f
- name: phantompeakqualtools test_phantompeakqualtools_paired_end
command: nextflow run tests/modules/phantompeakqualtools -entry test_phantompeakqualtools_paired_end -c tests/config/nextflow.config
tags:
- phantompeakqualtools
files:
- path: output/phantompeakqualtools/test.spp.Rdata
- path: output/phantompeakqualtools/test.spp.out
md5sum: eed46e75eab119224f397a7a8b5924e6
- path: output/phantompeakqualtools/test.spp.pdf
- path: output/phantompeakqualtools/versions.yml
md5sum: 383d2dd583fcb40451bde0d3840bdb72


@@ -0,0 +1,58 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { SEQKIT_STATS } from '../../../../modules/seqkit/stats/main.nf'
workflow test_seqkit_stats_single_end {
input = [
[ id:'test', single_end:true ], // meta map
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
]
SEQKIT_STATS ( input )
}
workflow test_seqkit_stats_paired_end {
input = [
[ id:'test', single_end:false ], // meta map
[
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)
]
]
SEQKIT_STATS ( input )
}
workflow test_seqkit_stats_nanopore {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true),
]
SEQKIT_STATS ( input )
}
workflow test_seqkit_stats_genome_fasta {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true),
]
SEQKIT_STATS ( input )
}
workflow test_seqkit_stats_transcriptome_fasta {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true),
]
SEQKIT_STATS ( input )
}


@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}


@@ -0,0 +1,54 @@
- name: seqkit stats test_seqkit_stats_single_end
command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_single_end -c tests/config/nextflow.config
tags:
- seqkit/stats
- seqkit
files:
- path: output/seqkit/test.tsv
md5sum: e23227d089a7e04b0ec0cb547c4aadff
- path: output/seqkit/versions.yml
md5sum: d67f0c16feb9df77b11f6c91bbdf9926
- name: seqkit stats test_seqkit_stats_paired_end
command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_paired_end -c tests/config/nextflow.config
tags:
- seqkit/stats
- seqkit
files:
- path: output/seqkit/test.tsv
md5sum: 9de20dc39fb01285e3f0c382fda9db52
- path: output/seqkit/versions.yml
md5sum: bd8881933b953d07f2600e2e6a88ebf3
- name: seqkit stats test_seqkit_stats_nanopore
command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_nanopore -c tests/config/nextflow.config
tags:
- seqkit/stats
- seqkit
files:
- path: output/seqkit/test.tsv
md5sum: 5da1709eb5ae64fa3b2d624bffe2e7aa
- path: output/seqkit/versions.yml
md5sum: 565632701fbe048f7ba99f1865bd48ca
- name: seqkit stats test_seqkit_stats_genome_fasta
command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_genome_fasta -c tests/config/nextflow.config
tags:
- seqkit/stats
- seqkit
files:
- path: output/seqkit/test.tsv
md5sum: f64489767a4e769539ef3faf83260184
- path: output/seqkit/versions.yml
md5sum: 782fcdeaa922c8bb532ffa5808849d87
- name: seqkit stats test_seqkit_stats_transcriptome_fasta
command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_transcriptome_fasta -c tests/config/nextflow.config
tags:
- seqkit/stats
- seqkit
files:
- path: output/seqkit/test.tsv
md5sum: fbb975b665a08c8862fcd1268613a945
- path: output/seqkit/versions.yml
md5sum: db99b016d986d26102ec398264a58410