mirror of https://github.com/MillironX/nf-core_modules.git
synced 2024-11-10 20:23:10 +00:00

Merge branch 'master' into tool/elprep-merge
This commit is contained in: a7ed1477ee
38 changed files with 448 additions and 56 deletions
@@ -7,8 +7,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES {
         'quay.io/biocontainers/antismash-lite:6.0.1--pyhdfd78af_1' }"

     /*
-    These files are normally downloaded by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH.
+    These files are normally downloaded/created by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH.
     Reason: Upon execution, the tool checks if certain database files are present within the container and if not, it tries to create them in /usr/local/bin, for which only root user has write permissions. Mounting those database files with this module prevents the tool from trying to create them.
+    These files are also emitted as output channels in this module to enable the antismash-lite module to use them as mount volumes to the docker/singularity containers.
     */

     containerOptions {
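The comment block above carries the rationale for mounting a pre-built database instead of letting antiSMASH write into the container. As a rough pipeline-side sketch only (the parameter name and channel wiring are assumptions, not part of this commit), a production workflow would stage its own local database copy along these lines:

    // Hypothetical sketch: point a pipeline at a user-supplied antiSMASH database
    // directory instead of running this CI-only download module in production.
    workflow {
        ch_antismash_db = Channel.fromPath(params.antismash_db, type: 'dir', checkIfExists: true)
        // ch_antismash_db (plus the css/detection/modules folders emitted by this module)
        // can then be passed to the antismash-lite module so they are mounted into the container.
    }
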
@@ -26,6 +27,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES {

     output:
     path("antismash_db") , emit: database
+    path("css"), emit: css_dir
+    path("detection"), emit: detection_dir
+    path("modules"), emit: modules_dir
     path "versions.yml", emit: versions

     when:
@@ -40,7 +44,7 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES {

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
-        antismash: \$(antismash --version | sed 's/antiSMASH //')
+        antismash-lite: \$(antismash --version | sed 's/antiSMASH //')
     END_VERSIONS
     """
 }
@@ -27,17 +27,17 @@ input:
   - database_css:
       type: directory
       description: |
-        antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+        antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
       pattern: "css"
   - database_detection:
       type: directory
       description: |
-        antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+        antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
       pattern: "detection"
   - database_modules:
       type: directory
       description: |
-        antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+        antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
       pattern: "modules"

 output:
@@ -50,6 +50,21 @@ output:
       type: directory
       description: Download directory for antiSMASH databases
       pattern: "antismash_db"
+  - css_dir:
+      type: directory
+      description: |
+        antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+      pattern: "css"
+  - detection_dir:
+      type: directory
+      description: |
+        antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+      pattern: "detection"
+  - modules_dir:
+      type: directory
+      description: |
+        antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines.
+      pattern: "modules"

 authors:
   - "@jasmezz"
@@ -2,10 +2,10 @@ process CUSTOM_GETCHROMSIZES {
     tag "$fasta"
     label 'process_low'

-    conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
+    conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
-        'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
+        'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' :
+        'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }"

     input:
     path fasta
@@ -17,7 +17,7 @@ process GATK4_HAPLOTYPECALLER {

     output:
     tuple val(meta), path("*.vcf.gz"), emit: vcf
-    tuple val(meta), path("*.tbi") , emit: tbi
+    tuple val(meta), path("*.tbi") , optional:true, emit: tbi
     path "versions.yml" , emit: versions

     when:
@@ -8,7 +8,7 @@ process GATK4_SPLITNCIGARREADS {
         'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }"

     input:
-    tuple val(meta), path(bam)
+    tuple val(meta), path(bam), path(bai), path(intervals)
     path fasta
     path fai
     path dict

@@ -23,6 +23,7 @@ process GATK4_SPLITNCIGARREADS {
     script:
     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
+    def interval_command = intervals ? "--intervals $intervals" : ""

     def avail_mem = 3
     if (!task.memory) {

@@ -35,6 +36,7 @@ process GATK4_SPLITNCIGARREADS {
         --input $bam \\
         --output ${prefix}.bam \\
         --reference $fasta \\
+        $interval_command \\
         --tmp-dir . \\
         $args

@@ -23,6 +23,13 @@ input:
       type: list
       description: BAM/SAM/CRAM file containing reads
       pattern: "*.{bam,sam,cram}"
+  - bai:
+      type: list
+      description: BAI/SAI/CRAI index file (optional)
+      pattern: "*.{bai,sai,crai}"
+  - intervals:
+      type: file
+      description: Bed file with the genomic regions included in the library (optional)
   - fasta:
       type: file
       description: The reference fasta file
modules/krona/ktimporttext/main.nf (new file, 34 lines)
@@ -0,0 +1,34 @@
+process KRONA_KTIMPORTTEXT {
+    tag "$meta.id"
+    label 'process_low'
+
+    conda (params.enable_conda ? "bioconda::krona=2.8.1" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/krona:2.8.1--pl5321hdfd78af_1':
+        'quay.io/biocontainers/krona:2.8.1--pl5321hdfd78af_1' }"
+
+    input:
+    tuple val(meta), path(report)
+
+    output:
+    tuple val(meta), path ('*.html'), emit: html
+    path "versions.yml" , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    """
+    ktImportText \\
+        $args \\
+        -o ${prefix}.html \\
+        $report
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        krona: \$( echo \$(ktImportText 2>&1) | sed 's/^.*KronaTools //g; s/- ktImportText.*\$//g')
+    END_VERSIONS
+    """
+}
modules/krona/ktimporttext/meta.yml (new file, 47 lines)
@@ -0,0 +1,47 @@
+name: "krona_ktimporttext"
+description: Creates a Krona chart from text files listing quantities and lineages.
+keywords:
+  - plot
+  - taxonomy
+  - interactive
+  - html
+  - visualisation
+  - krona chart
+  - metagenomics
+tools:
+  - krona:
+      description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files.
+      homepage: https://github.com/marbl/Krona/wiki/KronaTools
+      documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html
+      tool_dev_url: https://github.com/marbl/Krona
+      doi: 10.1186/1471-2105-12-385
+      licence: https://raw.githubusercontent.com/marbl/Krona/master/KronaTools/LICENSE.txt
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test']
+  - report:
+      type: file
+      description: "Tab-delimited text file. Each line should be a number followed by a list of wedges to contribute to (starting from the highest level). If no wedges are listed (and just a quantity is given), it will contribute to the top level. If the same lineage is listed more than once, the values will be added. Quantities can be omitted if -q is specified. Lines beginning with '#' will be ignored."
+      pattern: "*.{txt}"
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test' ]
+  - versions:
+      type: file
+      description: File containing software versions
+      pattern: "versions.yml"
+  - html:
+      type: file
+      description: A html file containing an interactive krona plot.
+      pattern: "*.{html}"
+
+authors:
+  - "@jianhong"
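The report description above pins down the expected layout, so a minimal illustrative input (counts and taxa invented for this example, not taken from the commit) is a quantity followed by tab-separated wedge names, one lineage per line:

    30    Bacteria    Proteobacteria    Escherichia
    10    Bacteria    Firmicutes
    5     Eukaryota
    2

The bare quantity on the last line contributes to the top level, and repeating a lineage adds its values together.
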
@@ -2,18 +2,22 @@ process MINIMAP2_ALIGN {
     tag "$meta.id"
     label 'process_medium'

-    conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null)
+    conda (params.enable_conda ? 'bioconda::minimap2=2.21 bioconda::samtools=1.12' : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' :
-        'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-66534bcbb7031a148b13e2ad42583020b9cd25c4:1679e915ddb9d6b4abda91880c4b48857d471bd8-0' :
+        'quay.io/biocontainers/mulled-v2-66534bcbb7031a148b13e2ad42583020b9cd25c4:1679e915ddb9d6b4abda91880c4b48857d471bd8-0' }"

     input:
     tuple val(meta), path(reads)
     path reference
+    val bam_format
+    val cigar_paf_format
+    val cigar_bam

     output:
-    tuple val(meta), path("*.paf"), emit: paf
-    path "versions.yml" , emit: versions
+    tuple val(meta), path("*.paf"), optional: true, emit: paf
+    tuple val(meta), path("*.bam"), optional: true, emit: bam
+    path "versions.yml" , emit: versions

     when:
     task.ext.when == null || task.ext.when

@@ -22,13 +26,19 @@ process MINIMAP2_ALIGN {
     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
     def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}"
+    def bam_output = bam_format ? "-a | samtools sort | samtools view -@ ${task.cpus} -b -h -o ${prefix}.bam" : "-o ${prefix}.paf"
+    def cigar_paf = cigar_paf_format && !sam_format ? "-c" : ''
+    def set_cigar_bam = cigar_bam && sam_format ? "-L" : ''
     """
     minimap2 \\
         $args \\
         -t $task.cpus \\
         $reference \\
        $input_reads \\
-        > ${prefix}.paf
+        $cigar_paf \\
+        $set_cigar_bam \\
+        $bam_output
+

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
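For orientation only, when bam_format is true the interpolated script above expands to a single pipe of roughly this shape (sample names, thread count and the value of $args are illustrative):

    minimap2 $args -t 4 genome.fasta sample_1.fastq.gz sample_2.fastq.gz -a | samtools sort | samtools view -@ 4 -b -h -o sample.bam

whereas with bam_format false the command instead ends in -o sample.paf; the optional -c and -L CIGAR flags come from the cigar_paf/set_cigar_bam definitions shown above.
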
@@ -29,6 +29,17 @@ input:
       type: file
       description: |
         Reference database in FASTA format.
+  - bam_format:
+      type: boolean
+      description: Specify that output should be in BAM format
+  - cigar_paf_format:
+      type: boolean
+      description: Specify that output CIGAR should be in PAF format
+  - cigar_bam:
+      type: boolean
+      description: |
+        Write CIGAR with >65535 ops at the CG tag. This is recommended when
+        doing XYZ (https://github.com/lh3/minimap2#working-with-65535-cigar-operations)
 output:
   - meta:
       type: map

@@ -39,9 +50,16 @@ output:
       type: file
       description: Alignment in PAF format
       pattern: "*.paf"
+  - bam:
+      type: file
+      description: Alignment in BAM format
+      pattern: "*.bam"
   - versions:
       type: file
       description: File containing software versions
       pattern: "versions.yml"
 authors:
   - "@heuermh"
+  - "@sofstam"
+  - "@sateeshperi"
+  - "@jfy133"
@@ -22,11 +22,12 @@ process PHANTOMPEAKQUALTOOLS {
     task.ext.when == null || task.ext.when

     script:
     def args = task.ext.args ?: ''
+    def args2 = task.ext.args2 ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
     """
     RUN_SPP=`which run_spp.R`
-    Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out"
+    Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" $args2

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
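Because args2 is appended after the fixed run_spp.R arguments, extra options can now be forwarded from a pipeline configuration with the same withName/ext mechanism used elsewhere in this change set; a hypothetical example (the option value is an assumption, not defined by this commit):

    process {
        withName: PHANTOMPEAKQUALTOOLS {
            ext.args2 = '-rf'   // hypothetical extra run_spp.R option appended to the Rscript call
        }
    }
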
@@ -2,10 +2,10 @@ process RSEM_CALCULATEEXPRESSION {
     tag "$meta.id"
     label 'process_high'

-    conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null)
+    conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.10a" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' :
-        'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' :
+        'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' }"

     input:
     tuple val(meta), path(reads)
@@ -2,10 +2,10 @@ process RSEM_PREPAREREFERENCE {
     tag "$fasta"
     label 'process_high'

-    conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null)
+    conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.10a" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' :
-        'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' :
+        'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' }"

     input:
     path fasta, stageAs: "rsem/*"
modules/samtools/bamtocram/main.nf (new file, 35 lines)
@@ -0,0 +1,35 @@
+//There is a -L option to only output alignments in interval, might be an option for exons/panel data?
+process SAMTOOLS_BAMTOCRAM {
+    tag "$meta.id"
+    label 'process_medium'
+
+    conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' :
+        'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }"
+
+    input:
+    tuple val(meta), path(input), path(index)
+    path fasta
+    path fai
+
+    output:
+    tuple val(meta), path("*.cram"), path("*.crai"), emit: cram_crai
+    path "versions.yml" , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    """
+    samtools view --threads ${task.cpus} --reference ${fasta} -C $args $input > ${prefix}.cram
+    samtools index -@${task.cpus} ${prefix}.cram
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+    END_VERSIONS
+    """
+}
modules/samtools/bamtocram/meta.yml (new file, 52 lines)
@@ -0,0 +1,52 @@
+name: samtools_bamtocram
+description: filter/convert and then index CRAM file
+keywords:
+  - view
+  - index
+  - bam
+  - cram
+tools:
+  - samtools:
+      description: |
+        SAMtools is a set of utilities for interacting with and post-processing
+        short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li.
+        These files are generated as output by short read aligners like BWA.
+      homepage: http://www.htslib.org/
+      documentation: hhttp://www.htslib.org/doc/samtools.html
+      doi: 10.1093/bioinformatics/btp352
+      licence: ["MIT"]
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - input:
+      type: file
+      description: BAM/SAM file
+      pattern: "*.{bam,sam}"
+  - index:
+      type: file
+      description: BAM/SAM index file
+      pattern: "*.{bai,sai}"
+  - fasta:
+      type: file
+      description: Reference file to create the CRAM file
+      pattern: "*.{fasta,fa}"
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - cram_crai:
+      type: file
+      description: filtered/converted CRAM file + index
+      pattern: "*{.cram,.crai}"
+  - version:
+      type: file
+      description: File containing software version
+      pattern: "*.{version.txt}"
+authors:
+  - "@FriederikeHanssen"
+  - "@maxulysse"
@@ -11,7 +11,8 @@ process TABIX_TABIX {
     tuple val(meta), path(tab)

     output:
-    tuple val(meta), path("*.tbi"), emit: tbi
+    tuple val(meta), path("*.tbi"), optional:true, emit: tbi
+    tuple val(meta), path("*.csi"), optional:true, emit: csi
     path "versions.yml" , emit: versions

     when:
@@ -31,6 +31,10 @@ output:
       type: file
       description: tabix index file
       pattern: "*.{tbi}"
+  - csi:
+      type: file
+      description: coordinate sorted index file
+      pattern: "*.{csi}"
   - versions:
       type: file
       description: File containing software versions
@@ -24,7 +24,7 @@ process TIDDIT_SV {
     script:
     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
-    def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : ""
+    def reference = fasta ? "--ref $fasta" : ""
     """
     tiddit \\
         --sv \\
@@ -11,12 +11,13 @@ process TRIMGALORE {
     tuple val(meta), path(reads)

     output:
-    tuple val(meta), path("*.fq.gz") , emit: reads
-    tuple val(meta), path("*report.txt"), emit: log
+    tuple val(meta), path("*{trimmed,val}*.fq.gz"), emit: reads
+    tuple val(meta), path("*report.txt") , emit: log
     path "versions.yml" , emit: versions

-    tuple val(meta), path("*.html"), emit: html optional true
-    tuple val(meta), path("*.zip") , emit: zip optional true
+    tuple val(meta), path("*unpaired*.fq.gz") , emit: unpaired, optional: true
+    tuple val(meta), path("*.html") , emit: html , optional: true
+    tuple val(meta), path("*.zip") , emit: zip , optional: true

     when:
     task.ext.when == null || task.ext.when

@@ -52,6 +53,7 @@ process TRIMGALORE {
         $c_r1 \\
         $tpc_r1 \\
         ${prefix}.fastq.gz
+
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
         trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//')

@@ -73,6 +75,7 @@ process TRIMGALORE {
         $tpc_r2 \\
         ${prefix}_1.fastq.gz \\
         ${prefix}_2.fastq.gz
+
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
         trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//')
@@ -37,6 +37,11 @@ output:
         List of input adapter trimmed FastQ files of size 1 and 2 for
         single-end and paired-end data, respectively.
       pattern: "*.{fq.gz}"
+  - unpaired:
+      type: file
+      description: |
+        FastQ files containing unpaired reads from read 1 or read 2
+      pattern: "*unpaired*.fq.gz"
   - html:
       type: file
       description: FastQC report (optional)
@@ -1054,6 +1054,10 @@ krona/ktimporttaxonomy:
   - modules/krona/ktimporttaxonomy/**
   - tests/modules/krona/ktimporttaxonomy/**

+krona/ktimporttext:
+  - modules/krona/ktimporttext/**
+  - tests/modules/krona/ktimporttext/**
+
 last/dotplot:
   - modules/last/dotplot/**
   - tests/modules/last/dotplot/**

@@ -1599,6 +1603,10 @@ samtools/bam2fq:
   - modules/samtools/bam2fq/**
   - tests/modules/samtools/bam2fq/**

+samtools/bamtocram:
+  - modules/samtools/bamtocram/**
+  - tests/modules/samtools/bamtocram/**
+
 samtools/collatefastq:
   - modules/samtools/collatefastq/**
   - tests/modules/samtools/collatefastq/**
@@ -109,6 +109,9 @@ params {

             test_sequencing_summary = "${test_data_dir}/genomics/sarscov2/nanopore/sequencing_summary/test.sequencing_summary.txt"
         }
+        'metagenome' {
+            kraken_report = "${test_data_dir}/genomics/sarscov2/metagenome/test_1.kraken2.report.txt"
+        }
     }
     'homo_sapiens' {
         'genome' {

@@ -245,8 +248,8 @@
             test2_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2_2.fastq.gz"
             test2_umi_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2.umi_1.fastq.gz"
             test2_umi_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2.umi_2.fastq.gz"
-            test_rnaseq_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test.rnaseq_1.fastq.gz"
-            test_rnaseq_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test.rnaseq_2.fastq.gz"
+            test_rnaseq_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_rnaseq_1.fastq.gz"
+            test_rnaseq_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_rnaseq_2.fastq.gz"

             test_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.baserecalibrator.table"
             test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table"
@@ -1,14 +1,17 @@
 - name: antismash antismashlitedownloaddatabases test_antismash_antismashlitedownloaddatabases
   command: nextflow run tests/modules/antismash/antismashlitedownloaddatabases -entry test_antismash_antismashlitedownloaddatabases -c tests/config/nextflow.config
   tags:
-    - antismash/antismashlitedownloaddatabases
     - antismash
+    - antismash/antismashlitedownloaddatabases
   files:
     - path: output/antismash/versions.yml
-      md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3
+      md5sum: 24859c67023abab99de295d3675a24b6
     - path: output/antismash/antismash_db
     - path: output/antismash/antismash_db/clusterblast
     - path: output/antismash/antismash_db/clustercompare
     - path: output/antismash/antismash_db/pfam
     - path: output/antismash/antismash_db/resfam
     - path: output/antismash/antismash_db/tigrfam
+    - path: output/antismash/css
+    - path: output/antismash/detection
+    - path: output/antismash/modules
@@ -6,7 +6,23 @@ include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarre

 workflow test_gatk4_splitncigarreads {
     input = [ [ id:'test' ], // meta map
-              [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ]
+              file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true),
+              [],
+              []
+            ]
+
+    fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
+    dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true)
+
+    GATK4_SPLITNCIGARREADS ( input, fasta, fai, dict )
+}
+
+workflow test_gatk4_splitncigarreads_intervals {
+    input = [ [ id:'test' ], // meta map
+              file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+              file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true),
+              file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)
             ]
     fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
     fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
@@ -5,5 +5,14 @@
     - gatk4/splitncigarreads
   files:
     - path: output/gatk4/test.bam
-      md5sum: ceed15c0bd64ff5c38d3816905933b0b
+      md5sum: 436d8e31285c6b588bdd1c7f1d07f6f2
+    - path: output/gatk4/versions.yml
+- name: gatk4 splitncigarreads test_gatk4_splitncigarreads_intervals
+  command: nextflow run tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads_intervals -c tests/config/nextflow.config
+  tags:
+    - gatk4
+    - gatk4/splitncigarreads
+  files:
+    - path: output/gatk4/test.bam
+      md5sum: cd56e3225950f519fd47164cca60a0bb
     - path: output/gatk4/versions.yml
tests/modules/krona/ktimporttext/main.nf (new file, 31 lines)
@@ -0,0 +1,31 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { KRONA_KTIMPORTTEXT } from '../../../../modules/krona/ktimporttext/main.nf'
+
+workflow test_krona_ktimporttext_multi {
+
+    input = [
+        [ id:'test', single_end:false ], // meta map
+        [
+            file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/krona/ktimporttext.txt', checkIfExists: true), // krona default test file
+            file(params.test_data['sarscov2']['metagenome']['kraken_report'], checkIfExists: true), //Kraken2 report file
+            file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/krona/kaiju_out4krona.txt', checkIfExists: true) // Kaiju output 4 krona
+        ]
+    ]
+
+    KRONA_KTIMPORTTEXT ( input )
+}
+
+workflow test_krona_ktimporttext_single {
+
+    input = [
+        [ id:'test', single_end:false ], // meta map
+        [
+            file('http://krona.sourceforge.net/examples/text.txt', checkIfExists: true) // krona default test file
+        ]
+    ]
+
+    KRONA_KTIMPORTTEXT ( input )
+}
tests/modules/krona/ktimporttext/nextflow.config (new file, 5 lines)
@@ -0,0 +1,5 @@
+process {
+
+    publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
+
+}
tests/modules/krona/ktimporttext/test.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
+- name: krona ktimporttext test_krona_ktimporttext_multi
+  command: nextflow run tests/modules/krona/ktimporttext -entry test_krona_ktimporttext_multi -c tests/config/nextflow.config
+  tags:
+    - krona
+    - krona/ktimporttext
+  files:
+    - path: output/krona/test.html
+      contains:
+        - "DOCTYPE html PUBLIC"
+
+- name: krona ktimporttext test_krona_ktimporttext_single
+  command: nextflow run tests/modules/krona/ktimporttext -entry test_krona_ktimporttext_single -c tests/config/nextflow.config
+  tags:
+    - krona
+    - krona/ktimporttext
+  files:
+    - path: output/krona/test.html
+      contains:
+        - "DOCTYPE html PUBLIC"
@@ -9,8 +9,11 @@ workflow test_minimap2_align_single_end {
               [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)]
             ]
     fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    bam_format = true
+    cigar_paf_format = false
+    cigar_bam = false

-    MINIMAP2_ALIGN ( input, fasta )
+    MINIMAP2_ALIGN ( input, fasta, bam_format, cigar_paf_format, cigar_bam)
 }

 workflow test_minimap2_align_paired_end {

@@ -19,6 +22,9 @@ workflow test_minimap2_align_paired_end {
               file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ]
             ]
     fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    bam_format = true
+    cigar_paf_format = false
+    cigar_bam = false

-    MINIMAP2_ALIGN ( input, fasta )
+    MINIMAP2_ALIGN ( input, fasta, bam_format, cigar_paf_format, cigar_bam )
 }
@@ -1,17 +1,17 @@
-- name: minimap2 align single-end
-  command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config
+- name: minimap2 align test_minimap2_align_single_end
+  command: nextflow run tests/modules/minimap2/align -entry test_minimap2_align_single_end -c tests/config/nextflow.config
   tags:
     - minimap2
     - minimap2/align
   files:
-    - path: ./output/minimap2/test.paf
-      md5sum: 70e8cf299ee3ecd33e629d10c1f588ce
+    - path: output/minimap2/test.bam
+    - path: output/minimap2/versions.yml

-- name: minimap2 align paired-end
-  command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config
+- name: minimap2 align test_minimap2_align_paired_end
+  command: nextflow run tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config
   tags:
     - minimap2
     - minimap2/align
   files:
-    - path: ./output/minimap2/test.paf
-      md5sum: 5e7b55a26bf0ea3a2843423d3e0b9a28
+    - path: output/minimap2/test.bam
+    - path: output/minimap2/versions.yml
@@ -42,7 +42,7 @@
     - path: output/rsem/rsem/genome.transcripts.fa
       md5sum: 050c521a2719c2ae48267c1e65218f29
     - path: output/rsem/rsem/genomeParameters.txt
-      md5sum: 2fe3a030e1706c3e8cd4df3818e6dd2f
+      md5sum: df5a456e3242520cc36e0083a6a7d9dd
     - path: output/rsem/rsem/sjdbInfo.txt
       md5sum: 5690ea9d9f09f7ff85b7fd47bd234903
     - path: output/rsem/rsem/sjdbList.fromGTF.out.tab

@@ -63,4 +63,4 @@
     - path: output/rsem/test.stat/test.theta
       md5sum: de2e4490c98cc5383a86ae8225fd0a28
     - path: output/rsem/test.transcript.bam
-      md5sum: 7846491086c478858419667d60f18edd
+      md5sum: ed681d39f5700ffc74d6321525330d93
tests/modules/samtools/bamtocram/main.nf (new file, 17 lines)
@@ -0,0 +1,17 @@
+#!/usr/bin/env nextflow
+
+nextflow.enable.dsl = 2
+
+include { SAMTOOLS_BAMTOCRAM } from '../../../../modules/samtools/bamtocram/main.nf'
+
+workflow test_samtools_bamtocram {
+
+    input = [ [ id:'test', single_end:false ], // meta map
+              file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+              file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)]
+
+    fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
+
+    SAMTOOLS_BAMTOCRAM ( input, fasta, fai )
+}
tests/modules/samtools/bamtocram/nextflow.config (new file, 5 lines)
@@ -0,0 +1,5 @@
+process {
+
+    publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
+
+}
tests/modules/samtools/bamtocram/test.yml (new file, 9 lines)
@@ -0,0 +1,9 @@
+- name: samtools bamtocram test_samtools_bamtocram
+  command: nextflow run ./tests/modules/samtools/bamtocram -entry test_samtools_bamtocram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bamtocram/nextflow.config
+  tags:
+    - samtools/bamtocram
+    - samtools
+  files:
+    - path: output/samtools/test.cram
+    - path: output/samtools/test.cram.crai
+    - path: output/samtools/versions.yml
@@ -2,9 +2,10 @@

 nextflow.enable.dsl = 2

 include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf'
 include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf'
-include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf'
+include { TABIX_TABIX as TABIX_VCF_TBI } from '../../../../modules/tabix/tabix/main.nf'
+include { TABIX_TABIX as TABIX_VCF_CSI } from '../../../../modules/tabix/tabix/main.nf'

 workflow test_tabix_tabix_bed {
     input = [ [ id:'B.bed' ], // meta map

@@ -22,10 +23,18 @@ workflow test_tabix_tabix_gff {
     TABIX_GFF ( input )
 }

-workflow test_tabix_tabix_vcf {
+workflow test_tabix_tabix_vcf_tbi {
     input = [ [ id:'test.vcf' ], // meta map
               [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ]
             ]

-    TABIX_VCF ( input )
+    TABIX_VCF_TBI ( input )
+}
+
+workflow test_tabix_tabix_vcf_csi {
+    input = [ [ id:'test.vcf' ], // meta map
+              [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ]
+            ]
+
+    TABIX_VCF_CSI ( input )
 }
@@ -10,8 +10,12 @@ process {
         ext.args = '-p gff'
     }

-    withName: TABIX_VCF {
+    withName: TABIX_VCF_TBI {
         ext.args = '-p vcf'
     }

+    withName: TABIX_VCF_CSI {
+        ext.args = '-p vcf --csi'
+    }
+
 }
@@ -15,10 +15,18 @@
     - path: ./output/tabix/genome.gff3.gz.tbi
       md5sum: f79a67d95a98076e04fbe0455d825926
 - name: tabix tabix vcf
-  command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config
+  command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf_tbi -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config
   tags:
     - tabix
     - tabix/tabix
   files:
     - path: output/tabix/test.vcf.gz.tbi
       md5sum: 36e11bf96ed0af4a92caa91a68612d64
+- name: tabix tabix vcf csi
+  command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf_csi -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config
+  tags:
+    - tabix
+    - tabix/tabix
+  files:
+    - path: output/tabix/test.vcf.gz.csi
+      md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd
@@ -9,6 +9,7 @@
     - path: output/tiddit/test.signals.tab
       md5sum: dab4b2fec4ddf8eb1c23005b0770150e
     - path: output/tiddit/test.vcf
+      md5sum: bdce14ae8292bf3deb81f6f255baf859

 - name: tiddit sv no ref
   command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config

@@ -21,3 +22,4 @@
     - path: output/tiddit/test.signals.tab
       md5sum: dab4b2fec4ddf8eb1c23005b0770150e
     - path: output/tiddit/test.vcf
+      md5sum: 3d0e83a8199b2bdb81cfe3e6b12bf64b