Merge remote-tracking branch 'origin/master' into gatk_spark

Author: SusiJo
Date: 2022-06-09 09:33:17 +02:00
Commit: 0204d2374b
33 changed files with 682 additions and 88 deletions

View file

@@ -9,7 +9,7 @@ process BCFTOOLS_ROH {
input:
tuple val(meta), path(vcf), path(tbi)
path af_file
tuple path(af_file), path(af_file_tbi)
path genetic_map
path regions_file
path samples_file

View file

@@ -23,6 +23,9 @@ input:
- af_file:
type: file
description: "Read allele frequencies from a tab-delimited file containing the columns: CHROM\tPOS\tREF,ALT\tAF."
- af_file_tbi:
type: file
description: "tbi index of af_file."
- genetic_map:
type: file
description: "Genetic map in the format required also by IMPUTE2."

View file

@@ -0,0 +1,54 @@
process GATK_INDELREALIGNER {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::gatk=3.5" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk:3.5--hdfd78af_11':
'quay.io/biocontainers/gatk:3.5--hdfd78af_11' }"
input:
tuple val(meta), path(bam), path(bai), path(intervals)
path(fasta)
path(fai)
path(dict)
path(known_vcf)
output:
tuple val(meta), path("*.bam"), path("*.bai"), emit: bam
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def known = known_vcf ? "-known ${known_vcf}" : ""
if ("$bam" == "${prefix}.bam") error "Input and output names are the same, set prefix in module configuration to disambiguate!"
def avail_mem = 3
if (!task.memory) {
log.info '[GATK IndelRealigner] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
} else {
avail_mem = task.memory.giga
}
"""
gatk3 \\
-Xmx${avail_mem}g \\
-T IndelRealigner \\
-R ${fasta} \\
-I ${bam} \\
--targetIntervals ${intervals} \\
${known} \\
-o ${prefix}.bam \\
$args
cat <<-END_VERSIONS > versions.yml
"${task.process}":
gatk: \$(echo \$(gatk3 --version))
END_VERSIONS
"""
}
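Since the Java heap here falls back to 3 GB whenever task.memory is unset, a pipeline would normally pin the process memory in configuration. A minimal, hypothetical sketch (the ext.prefix line mirrors the test configuration later in this commit; the memory value is illustrative):

process {
    withName: GATK_INDELREALIGNER {
        memory     = 8.GB                        // becomes -Xmx8g inside the module
        ext.prefix = { "${meta.id}.realigned" }  // avoids the input/output name clash guarded above
    }
}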

View file

@@ -0,0 +1,71 @@
name: "gatk_indelrealigner"
description: Performs local realignment around indels to correct for mapping errors
keywords:
- bam
- vcf
- variant calling
- indel
- realignment
tools:
- "gatk":
description: "The full Genome Analysis Toolkit (GATK) framework, license restricted."
homepage: "https://gatk.broadinstitute.org/hc/en-us"
documentation: "https://github.com/broadinstitute/gatk-docs"
licence: "['https://software.broadinstitute.org/gatk/download/licensing', 'BSD', 'https://www.broadinstitute.org/gatk/about/#licensing']"
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- bam:
type: file
description: Sorted and indexed BAM file
pattern: "*.bam"
- bai:
type: file
description: BAM index file
pattern: "*.bai"
- intervals:
type: file
description: Intervals file created by gatk3 RealignerTargetCreator
pattern: "*.{intervals,list}"
- fasta:
type: file
description: Reference file used to generate BAM file
pattern: "*.{fasta,fa,fna}"
- fai:
type: file
description: Index of reference file used to generate BAM file
pattern: "*.fai"
- dict:
type: file
description: GATK dict file for reference
pattern: "*.dict"
- known_vcf:
type: file
description: Optional input VCF file(s) with known indels
pattern: "*.vcf"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- bam:
type: file
description: Sorted and indexed BAM file with local realignment around variants
pattern: "*.bam"
- bai:
type: file
description: Output BAM Index file
pattern: "*.bai"
authors:
- "@jfy133"

View file

@@ -0,0 +1,53 @@
process GATK_REALIGNERTARGETCREATOR {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::gatk=3.5" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/gatk:3.5--hdfd78af_11':
'quay.io/biocontainers/gatk:3.5--hdfd78af_11' }"
input:
tuple val(meta), path(input), path(index)
path fasta
path fai
path dict
path known_vcf
output:
tuple val(meta), path("*.intervals"), emit: intervals
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def known = known_vcf ? "-known ${known_vcf}" : ""
if ("$input" == "${prefix}.bam") error "Input and output names are the same, set prefix in module configuration to disambiguate!"
def avail_mem = 3
if (!task.memory) {
log.info '[GATK RealignerTargetCreator] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
} else {
avail_mem = task.memory.giga
}
"""
gatk3 \\
-Xmx${avail_mem}g \\
-T RealignerTargetCreator \\
-nt ${task.cpus} \\
-I ${input} \\
-R ${fasta} \\
-o ${prefix}.intervals \\
${known} \\
$args
cat <<-END_VERSIONS > versions.yml
"${task.process}":
gatk: \$(echo \$(gatk3 --version))
END_VERSIONS
"""
}

View file

@@ -0,0 +1,64 @@
name: "gatk_realignertargetcreator"
description: Generates a list of locations that should be considered for local realignment prior to genotyping.
keywords:
- bam
- vcf
- variant calling
- indel
- realignment
- targets
tools:
- "gatk":
description: "The full Genome Analysis Toolkit (GATK) framework, license restricted."
homepage: "https://gatk.broadinstitute.org/hc/en-us"
documentation: "https://github.com/broadinstitute/gatk-docs"
licence: "['https://software.broadinstitute.org/gatk/download/licensing', 'BSD', 'https://www.broadinstitute.org/gatk/about/#licensing']"
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- input:
type: file
description: Sorted and indexed BAM/CRAM/SAM file
pattern: "*.bam"
- index:
type: file
description: BAM index file
pattern: "*.bai"
- fasta:
type: file
description: Reference file used to generate BAM file
pattern: "*.{fasta,fa,fna}"
- fai:
type: file
description: Index of reference file used to generate BAM file
pattern: "*.fai"
- dict:
type: file
description: GATK dict file for reference
pattern: "*.dict"
- known_vcf:
type: file
description: Optional input VCF file(s) with known indels
pattern: "*.vcf"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- intervals:
type: file
description: File containing intervals that represent sites of extant and potential indels.
pattern: "*.intervals"
authors:
- "@jfy133"

View file

@@ -7,7 +7,8 @@ process MULTIQC {
'quay.io/biocontainers/multiqc:1.12--pyhdfd78af_0' }"
input:
path multiqc_files
path multiqc_files, stageAs: "?/*"
tuple path(multiqc_config), path(multiqc_logo)
output:
path "*multiqc_report.html", emit: report
@@ -20,8 +21,13 @@ process MULTIQC {
script:
def args = task.ext.args ?: ''
def config = multiqc_config ? "--config $multiqc_config" : ''
"""
multiqc -f $args .
multiqc \\
--force \\
$config \\
$args \\
.
cat <<-END_VERSIONS > versions.yml
"${task.process}":

View file

@@ -17,6 +17,14 @@ input:
type: file
description: |
List of reports / files recognised by MultiQC, for example the html and zip output of FastQC
- multiqc_config:
type: file
description: Config yml for MultiQC
pattern: "*.{yml,yaml}"
- multiqc_logo:
type: file
description: Logo file for MultiQC
pattern: "*.{png}"
output:
- report:
type: file
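A minimal sketch of calling the module with the new second input (file names are hypothetical; the updated tests pass [[],[]] when neither config nor logo is supplied):

MULTIQC (
    ch_multiqc_files.collect(),
    [ file('multiqc_config.yml'), file('custom_logo.png') ]
)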

View file

@@ -0,0 +1,51 @@
process RHOCALL_ANNOTATE {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::rhocall=0.5.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/rhocall:0.5.1--py39hbf8eff0_0':
'quay.io/biocontainers/rhocall:0.5.1--py39hbf8eff0_0' }"
input:
tuple val(meta), path(vcf), path(tbi)
tuple val(meta), path(roh)
path bed
output:
tuple val(meta), path("*_rhocall.vcf"), emit: vcf
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def az_bed = bed ? "-b ${bed}" : ''
"""
rhocall \\
annotate \\
$args \\
$az_bed \\
-r $roh \\
-o ${prefix}_rhocall.vcf \\
$vcf
cat <<-END_VERSIONS > versions.yml
"${task.process}":
rhocall: \$(echo \$(rhocall --version 2>&1) | sed 's/rhocall, version //' )
END_VERSIONS
"""
stub:
def prefix = task.ext.prefix ?: "${meta.id}"
"""
touch ${prefix}_rhocall.vcf
cat <<-END_VERSIONS > versions.yml
"${task.process}":
rhocall: \$(echo \$(rhocall --version 2>&1) | sed 's/rhocall, version //' )
END_VERSIONS
"""
}

View file

@@ -0,0 +1,54 @@
name: "rhocall_annotate"
description: "Markup VCF file using rho-calls."
keywords:
- roh
- rhocall
tools:
- "rhocall":
description: "Call regions of homozygosity and make tentative UPD calls."
homepage: "https://github.com/dnil/rhocall"
documentation: "https://github.com/dnil/rhocall"
tool_dev_url: "https://github.com/dnil"
doi: ""
licence: "['GPL v3']"
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- vcf:
type: file
description: vcf file
pattern: "*.{vcf,vcf.gz}"
- tbi:
type: file
description: vcf index file
pattern: "*.{tbi}"
- roh:
type: file
description: Bcftools roh style TSV file with CHR,POS,AZ,QUAL
pattern: "*.{roh}"
- bed:
type: file
description: BED file with AZ windows.
pattern: "*.{bed}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- vcf:
type: file
description: vcf file
pattern: "*.{vcf,vcf.gz}"
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
authors:
- "@ramprasadn"

View file

@@ -2,17 +2,17 @@ process TIDDIT_COV {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null)
conda (params.enable_conda ? "bioconda::tiddit=3.0.0" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' :
'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }"
'https://depot.galaxyproject.org/singularity/tiddit:3.0.0--py39h59fae87_1' :
'quay.io/biocontainers/tiddit:3.0.0--py39h59fae87_1' }"
input:
tuple val(meta), path(bam)
tuple val(meta), path(input)
path fasta
output:
tuple val(meta), path("*.tab"), optional: true, emit: cov
tuple val(meta), path("*.bed"), optional: true, emit: cov
tuple val(meta), path("*.wig"), optional: true, emit: wig
path "versions.yml" , emit: versions
@@ -28,12 +28,12 @@ process TIDDIT_COV {
--cov \\
-o $prefix \\
$args \\
--bam $bam \\
--bam $input \\
$reference
cat <<-END_VERSIONS > versions.yml
"${task.process}":
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//')
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*tiddit-//; s/ .*\$//')
END_VERSIONS
"""
@@ -45,7 +45,7 @@ process TIDDIT_COV {
cat <<-END_VERSIONS > versions.yml
"${task.process}":
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//')
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*tiddit-//; s/ .*\$//')
END_VERSIONS
"""
}

View file

@@ -19,7 +19,7 @@ input:
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- bam:
- input:
type: file
description: BAM/CRAM file
pattern: "*.{bam,cram}"

View file

@@ -2,20 +2,19 @@ process TIDDIT_SV {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::tiddit=2.12.1" : null)
conda (params.enable_conda ? "bioconda::tiddit=3.0.0" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/tiddit:2.12.1--py38h1773678_0' :
'quay.io/biocontainers/tiddit:2.12.1--py38h1773678_0' }"
'https://depot.galaxyproject.org/singularity/tiddit:3.0.0--py39h59fae87_1' :
'quay.io/biocontainers/tiddit:3.0.0--py39h59fae87_1' }"
input:
tuple val(meta), path(bam)
tuple val(meta), path(input), path(input_index)
path fasta
path fai
path bwa_index
output:
tuple val(meta), path("*.vcf") , emit: vcf
tuple val(meta), path("*.ploidy.tab") , emit: ploidy
tuple val(meta), path("*.signals.tab"), emit: signals
tuple val(meta), path("*.ploidies.tab"), emit: ploidy
path "versions.yml" , emit: versions
when:
@@ -24,18 +23,19 @@ process TIDDIT_SV {
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def reference = fasta ? "--ref $fasta" : ""
"""
[[ -d $bwa_index ]] && for i in $bwa_index/*; do [[ -f $fasta && ! "\$i" =~ .*"$fasta".* ]] && ln -s \$i ${fasta}.\${i##*.} || ln -s \$i .; done
tiddit \\
--sv \\
$args \\
--bam $bam \\
$reference \\
--bam $input \\
--ref $fasta \\
-o $prefix
cat <<-END_VERSIONS > versions.yml
"${task.process}":
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//')
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*tiddit-//; s/ .*\$//')
END_VERSIONS
"""
@@ -43,12 +43,11 @@ process TIDDIT_SV {
def prefix = task.ext.prefix ?: "${meta.id}"
"""
touch ${prefix}.vcf
touch ${prefix}.ploidy.tab
touch ${prefix}.signals.tab
touch ${prefix}.ploidies.tab
cat <<-END_VERSIONS > versions.yml
"${task.process}":
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//')
tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*tiddit-//; s/ .*\$//')
END_VERSIONS
"""
}

View file

@@ -17,14 +17,22 @@ input:
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- input:
type: file
description: BAM/CRAM file
pattern: "*.{bam,cram}"
- index:
type: file
description: BAM/CRAM index file
pattern: "*.{bai,crai}"
- fasta:
type: file
description: Input FASTA file
pattern: "*.{fasta,fa}"
- fai:
- bwa_index:
type: file
description: FASTA index file
pattern: "*.{fai}"
description: BWA genome index files
pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}"
output:
- meta:
type: map
@@ -38,11 +46,7 @@ output:
- ploidy:
type: file
description: tab
pattern: "*.{ploidy.tab}"
- signals:
type: file
description: tab
pattern: "*.{signals.tab}"
pattern: "*.{ploidies.tab}"
- versions:
type: file
description: File containing software versions
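With the reference index now supplied as a directory, a caller first builds it with BWA_INDEX and passes the result through, roughly as in this sketch mirroring the updated tests (the channel name is a placeholder):

BWA_INDEX ( fasta )
TIDDIT_SV ( ch_input_indexed, fasta, BWA_INDEX.out.index )  // ch_input_indexed: [ meta, bam/cram, index ]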

View file

@@ -715,6 +715,14 @@ gamma/gamma:
- modules/gamma/gamma/**
- tests/modules/gamma/gamma/**
gatk/indelrealigner:
- modules/gatk/indelrealigner/**
- tests/modules/gatk/indelrealigner/**
gatk/realignertargetcreator:
- modules/gatk/realignertargetcreator/**
- tests/modules/gatk/realignertargetcreator/**
gatk/unifiedgenotyper:
- modules/gatk/unifiedgenotyper/**
- tests/modules/gatk/unifiedgenotyper/**
@@ -1647,6 +1655,10 @@ rgi/main:
- modules/rgi/main/**
- tests/modules/rgi/main/**
rhocall/annotate:
- modules/rhocall/annotate/**
- tests/modules/rhocall/annotate/**
rmarkdownnotebook:
- modules/rmarkdownnotebook/**
- tests/modules/rmarkdownnotebook/**

View file

@@ -10,7 +10,7 @@ workflow test_bcftools_roh {
file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)]
af_file = []
af_file = [[],[]]
gen_map = []
regions = []
targets = []
@@ -25,7 +25,7 @@ workflow test_bcftools_roh_stub {
file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)]
af_file = []
af_file = [[],[]]
gen_map = []
regions = []
targets = []

View file

@@ -0,0 +1,33 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GATK_REALIGNERTARGETCREATOR } from '../../../../modules/gatk/realignertargetcreator/main.nf'
include { GATK_INDELREALIGNER } from '../../../../modules/gatk/indelrealigner/main.nf'
workflow test_gatk_indelrealigner {
fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true)
input_realignertargetcreator = [ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true),
]
GATK_REALIGNERTARGETCREATOR ( input_realignertargetcreator, fasta, fai, dict, [] )
ch_intervals = GATK_REALIGNERTARGETCREATOR.out.intervals
ch_bams_indelrealigner = Channel.of([ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
])
ch_input_indelrealigner = ch_bams_indelrealigner.mix(ch_intervals).groupTuple(by: 0).map{ [it[0], it[1][0], it[2], it[1][1] ] }.dump(tag: "input")
GATK_INDELREALIGNER ( ch_input_indelrealigner, fasta, fai, dict, [] )
}
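For readers puzzling over the mix/groupTuple/map reshaping above, an equivalent and arguably simpler formulation (a hedged alternative, not part of this commit) joins the two channels on the meta key:

ch_input_indelrealigner = ch_bams_indelrealigner.join( GATK_REALIGNERTARGETCREATOR.out.intervals )  // [ meta, bam, bai, intervals ]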

View file

@@ -0,0 +1,6 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
ext.prefix = { "${meta.id}.realigned" }
}

View file

@@ -0,0 +1,12 @@
- name: gatk indelrealigner test_gatk_indelrealigner
command: nextflow run ./tests/modules/gatk/indelrealigner -entry test_gatk_indelrealigner -c ./tests/config/nextflow.config -c ./tests/modules/gatk/indelrealigner/nextflow.config
tags:
- gatk/indelrealigner
- gatk
files:
- path: output/gatk/test.realigned.bai
md5sum: 85a67df8827fe426e7f3a458134c0551
- path: output/gatk/test.realigned.bam
md5sum: ea1df6f7fcafc408fae4dc1574813d8a
- path: output/gatk/test.realigned.intervals
md5sum: 7aa7a1b235a510e6591e262382086bf8

View file

@@ -0,0 +1,18 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GATK_REALIGNERTARGETCREATOR } from '../../../../modules/gatk/realignertargetcreator/main.nf'
workflow test_gatk_realignertargetcreator {
input = [ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true),
]
fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true)
GATK_REALIGNERTARGETCREATOR ( input, fasta, fai, dict, [] )
}

View file

@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@@ -0,0 +1,8 @@
- name: gatk realignertargetcreator test_gatk_realignertargetcreator
command: nextflow run ./tests/modules/gatk/realignertargetcreator -entry test_gatk_realignertargetcreator -c ./tests/config/nextflow.config -c ./tests/modules/gatk/realignertargetcreator/nextflow.config
tags:
- gatk
- gatk/realignertargetcreator
files:
- path: output/gatk/test.intervals
md5sum: 7aa7a1b235a510e6591e262382086bf8

View file

@@ -3,14 +3,31 @@
nextflow.enable.dsl = 2
include { FASTQC } from '../../../modules/fastqc/main.nf'
include { FASTQC as FASTQC2 } from '../../../modules/fastqc/main.nf'
include { MULTIQC } from '../../../modules/multiqc/main.nf'
workflow test_multiqc {
input = [ [ id: 'test', single_end: false ],
[ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ]
input = [
[ id: 'test', single_end: false ],
[ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)]
]
FASTQC ( input )
MULTIQC ( FASTQC.out.zip.collect { it[1] } )
MULTIQC ( FASTQC.out.zip.collect { it[1] }, [[],[]] )
}
workflow test_multiqc_fn_collision {
fqc_input = [
[ id: 'test', single_end: false ],
[ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)]
]
mqc_input = Channel.empty()
FASTQC ( fqc_input )
mqc_input = mqc_input.mix(FASTQC.out.zip.collect { it[1] })
FASTQC2 ( fqc_input )
mqc_input = mqc_input.mix(FASTQC2.out.zip.collect { it[1] })
MULTIQC ( mqc_input, [[],[]] )
}

View file

@@ -1,5 +1,7 @@ process {
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
withName: "FASTQC*" {
publishDir = [ enabled: false ]
}
}

View file

@@ -1,6 +1,13 @@
- name: multiqc
- name: multiqc test_multiqc
command: nextflow run ./tests/modules/multiqc -entry test_multiqc -c ./tests/config/nextflow.config -c ./tests/modules/multiqc/nextflow.config
tags:
- multiqc
files:
- path: output/multiqc/multiqc_report.html
- name: multiqc test_multiqc_fn_collision
command: nextflow run ./tests/modules/multiqc -entry test_multiqc_fn_collision -c ./tests/config/nextflow.config -c ./tests/modules/multiqc/nextflow.config
tags:
- multiqc
files:
- path: output/multiqc/multiqc_report.html

View file

@@ -0,0 +1,40 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { RHOCALL_ANNOTATE } from '../../../../modules/rhocall/annotate/main.nf'
include { BCFTOOLS_ROH } from '../../../../modules/bcftools/roh/main.nf'
workflow test_rhocall_annotate {
input = [ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)]
af_file = [[],[]]
gen_map = []
regions = []
targets = []
samples = []
BCFTOOLS_ROH ( input, af_file, gen_map, regions, samples, targets )
RHOCALL_ANNOTATE ( input, BCFTOOLS_ROH.out.roh, [])
}
workflow test_rhocall_annotate_stub {
input = [ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true)]
af_file = [[],[]]
gen_map = []
regions = []
targets = []
samples = []
BCFTOOLS_ROH ( input, af_file, gen_map, regions, samples, targets )
RHOCALL_ANNOTATE ( input, BCFTOOLS_ROH.out.roh, [])
}

View file

@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@@ -0,0 +1,17 @@
- name: "rhocall annotate"
command: nextflow run ./tests/modules/rhocall/annotate -entry test_rhocall_annotate -c ./tests/config/nextflow.config -c ./tests/modules/rhocall/annotate/nextflow.config
tags:
- "rhocall"
- "rhocall/annotate"
files:
- path: "output/rhocall/test_rhocall.vcf"
- path: "output/rhocall/versions.yml"
- name: "rhocall annotate stub"
command: nextflow run ./tests/modules/rhocall/annotate -entry test_rhocall_annotate_stub -c ./tests/config/nextflow.config -c ./tests/modules/rhocall/annotate/nextflow.config -stub-run
tags:
- "rhocall"
- "rhocall/annotate"
files:
- path: "output/rhocall/test_rhocall.vcf"
- path: "output/rhocall/versions.yml"

View file

@@ -2,22 +2,41 @@
nextflow.enable.dsl = 2
include { TIDDIT_COV } from '../../../../modules/tiddit/cov/main.nf'
include { TIDDIT_COV as TIDDIT_COV_BED } from '../../../../modules/tiddit/cov/main.nf'
include { TIDDIT_COV as TIDDIT_COV_WIG } from '../../../../modules/tiddit/cov/main.nf'
workflow test_tiddit_cov {
workflow test_tiddit_cov_cram_bed {
input = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ]
fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
TIDDIT_COV_BED ( input, fasta )
}
workflow test_tiddit_cov_bam_bed {
input = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ]
fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
TIDDIT_COV ( input, fasta )
TIDDIT_COV_BED ( input, [] )
}
workflow test_tiddit_cov_no_ref {
workflow test_tiddit_cov_cram_wig {
input = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ]
fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
TIDDIT_COV_WIG ( input, fasta )
}
workflow test_tiddit_cov_bam_wig {
input = [ [ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ]
TIDDIT_COV ( input, [] )
TIDDIT_COV_WIG ( input, [] )
}

View file

@@ -2,4 +2,8 @@ process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
withName: TIDDIT_COV_WIG {
ext.args = '-w'
}
}

View file

@@ -1,17 +1,35 @@
- name: tiddit cov test_tiddit_cov
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
- name: tiddit cov test_tiddit_cov_cram_bed
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_cram_bed -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
tags:
- tiddit
- tiddit/cov
files:
- path: output/tiddit/test.tab
md5sum: f7974948f809f94879d8a60b726194f5
- path: output/tiddit/test.bed
md5sum: 3b1a28c62a5f25bbba77c1042e9abdf7
- name: tiddit cov test_tiddit_cov_no_ref
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
- name: tiddit cov test_tiddit_cov_bam_bed
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_bam_bed -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
tags:
- tiddit
- tiddit/cov
files:
- path: output/tiddit/test.tab
md5sum: f7974948f809f94879d8a60b726194f5
- path: output/tiddit/test.bed
md5sum: 9d1474f1c7c6516205254077087bb026
- name: tiddit cov test_tiddit_cov_cram_wig
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_cram_wig -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
tags:
- tiddit
- tiddit/cov
files:
- path: output/tiddit/test.wig
md5sum: ca3645fd0c3491c86c075c91d16d57c4
- name: tiddit cov test_tiddit_cov_bam_wig
command: nextflow run ./tests/modules/tiddit/cov -entry test_tiddit_cov_bam_wig -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/cov/nextflow.config
tags:
- tiddit
- tiddit/cov
files:
- path: output/tiddit/test.wig
md5sum: 44bea2ac6a56774738e65773065da670

View file

@@ -2,25 +2,33 @@
nextflow.enable.dsl = 2
include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf'
include { TIDDIT_SV } from '../../../../modules/tiddit/sv/main.nf'
workflow test_tiddit_sv {
workflow test_tiddit_sv_bam {
input = [
[ id:'test' ], // meta map
[ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ]
[ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
[ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
]
fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
TIDDIT_SV ( input, fasta, fai )
BWA_INDEX( fasta )
TIDDIT_SV ( input, fasta, BWA_INDEX.out.index)
}
workflow test_tiddit_sv_no_ref {
workflow test_tiddit_sv_cram {
input = [
[ id:'test' ], // meta map
[ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ]
[ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ],
[ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) ]
]
TIDDIT_SV ( input, [], [] )
fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
BWA_INDEX( fasta )
TIDDIT_SV ( input, fasta, BWA_INDEX.out.index)
}

View file

@@ -1,25 +1,21 @@
- name: tiddit sv
command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config
- name: tiddit sv test_tiddit_sv_bam
command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_bam -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config
tags:
- tiddit
- tiddit/sv
files:
- path: output/tiddit/test.ploidy.tab
md5sum: 45e050b0e204f0a5a3a99627cc440eaa
- path: output/tiddit/test.signals.tab
md5sum: dab4b2fec4ddf8eb1c23005b0770150e
- path: output/tiddit/test.ploidies.tab
md5sum: 6319d3611f7b6b94425a184d274b3dfc
- path: output/tiddit/test.vcf
md5sum: bdce14ae8292bf3deb81f6f255baf859
md5sum: 41d3f8746f0420f894104321b7e64f67
- name: tiddit sv no ref
command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config
- name: tiddit sv test_tiddit_sv_cram
command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_cram -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config
tags:
- tiddit
- tiddit/sv
files:
- path: output/tiddit/test.ploidy.tab
md5sum: 45e050b0e204f0a5a3a99627cc440eaa
- path: output/tiddit/test.signals.tab
md5sum: dab4b2fec4ddf8eb1c23005b0770150e
- path: output/tiddit/test.ploidies.tab
md5sum: f1162a940ddc8b963f6e0e506bb5c136
- path: output/tiddit/test.vcf
md5sum: 3d0e83a8199b2bdb81cfe3e6b12bf64b
md5sum: 34db59578991285d6b62dc1500272fca