Merge branch 'master' into seqtk/seq-indent-fix

commit 14825510cb
Author: Sateesh Peri, 2022-03-09 11:46:20 -05:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
109 changed files with 1866 additions and 155 deletions


@ -27,6 +27,6 @@ Closes #XXX <!-- If this PR fixes an issue, please link it here! -->
- [ ] Add a resource `label`
- [ ] Use BioConda and BioContainers if possible to fulfil software requirements.
- Ensure that the test works with either Docker / Singularity. Conda CI tests can be quite flaky:
- [ ] `PROFILE=docker pytest --tag <MODULE> --symlink --keep-workflow-wd`
- [ ] `PROFILE=singularity pytest --tag <MODULE> --symlink --keep-workflow-wd`
- [ ] `PROFILE=conda pytest --tag <MODULE> --symlink --keep-workflow-wd`
- [ ] `PROFILE=docker pytest --tag <MODULE> --symlink --keep-workflow-wd --git-aware`
- [ ] `PROFILE=singularity pytest --tag <MODULE> --symlink --keep-workflow-wd --git-aware`
- [ ] `PROFILE=conda pytest --tag <MODULE> --symlink --keep-workflow-wd --git-aware`


@ -86,7 +86,7 @@ jobs:
# Test the module
- name: Run pytest-workflow
# only use one thread for pytest-workflow to avoid race condition on conda cache.
run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof
run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware
- name: Output log on failure
if: failure()


@ -11,9 +11,15 @@ process ADAPTERREMOVAL {
tuple val(meta), path(reads)
output:
tuple val(meta), path('*.fastq.gz'), emit: reads
tuple val(meta), path('*.log') , emit: log
path "versions.yml" , emit: versions
tuple val(meta), path('*.truncated.gz') , optional: true, emit: singles_truncated
tuple val(meta), path('*.discarded.gz') , optional: true, emit: discarded
tuple val(meta), path('*.pair1.truncated.gz') , optional: true, emit: pair1_truncated
tuple val(meta), path('*.pair2.truncated.gz') , optional: true, emit: pair2_truncated
tuple val(meta), path('*.collapsed.gz') , optional: true, emit: collapsed
tuple val(meta), path('*.collapsed.truncated') , optional: true, emit: collapsed_truncated
tuple val(meta), path('*paired.gz') , optional: true, emit: paired_interleaved
tuple val(meta), path('*.log') , emit: log
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
@ -28,30 +34,27 @@ process ADAPTERREMOVAL {
--file1 $reads \\
$args \\
--basename $prefix \\
--threads $task.cpus \\
--threads ${task.cpus} \\
--settings ${prefix}.log \\
--output1 ${prefix}.trimmed.fastq.gz \\
--seed 42 \\
--gzip \\
--gzip
cat <<-END_VERSIONS > versions.yml
"${task.process}":
adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g")
END_VERSIONS
"""
} else if (!meta.single_end && !meta.collapse) {
} else if (!meta.single_end ) {
"""
AdapterRemoval \\
--file1 ${reads[0]} \\
--file2 ${reads[1]} \\
$args \\
--basename $prefix \\
--threads $task.cpus \\
--threads ${task.cpus} \\
--settings ${prefix}.log \\
--output1 ${prefix}.pair1.trimmed.fastq.gz \\
--output2 ${prefix}.pair2.trimmed.fastq.gz \\
--seed 42 \\
--gzip \\
--gzip
cat <<-END_VERSIONS > versions.yml
"${task.process}":
@ -63,13 +66,12 @@ process ADAPTERREMOVAL {
AdapterRemoval \\
--file1 ${reads[0]} \\
--file2 ${reads[1]} \\
--collapse \\
$args \\
--basename $prefix \\
--threads $task.cpus \\
--settings ${prefix}.log \\
--seed 42 \\
--gzip \\
--gzip
cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz
cat <<-END_VERSIONS > versions.yml


@ -17,13 +17,13 @@ input:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false, collapse:false ]
e.g. [ id:'test', single_end:false ]
- reads:
type: file
description: |
List of input FastQ files of size 1 and 2 for single-end and paired-end data,
respectively.
pattern: "*.{fq,fastq,fg.gz,fastq.gz}"
pattern: "*.{fq,fastq,fq.gz,fastq.gz}"
output:
- meta:
@ -31,12 +31,45 @@ output:
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- reads:
- singles_truncated:
type: file
description: |
List of input adapter trimmed FastQ files of size 1 or 2 for
single-end or collapsed data and paired-end data, respectively.
pattern: "*.{fastq.gz}"
Adapter trimmed FastQ files of either single-end reads, or singleton
'orphaned' reads from merging of paired-end data (i.e., one of the pair
was lost due to filtering thresholds).
pattern: "*.truncated.gz"
- discarded:
type: file
description: |
Adapter trimmed FastQ files of reads that did not pass filtering
thresholds.
pattern: "*.discarded.gz"
- pair1_truncated:
type: file
description: |
Adapter trimmed R1 FastQ files of paired-end reads that did not merge
with their respective R2 pair due to long templates. The respective pair
is stored in 'pair2_truncated'.
pattern: "*.pair1.truncated.gz"
- pair2_truncated:
type: file
description: |
Adapter trimmed R2 FastQ files of paired-end reads that did not merge
with their respective R1 pair due to long templates. The respective pair
is stored in 'pair1_truncated'.
pattern: "*.pair2.truncated.gz"
- collapsed:
type: file
description: |
Collapsed FastQ of paired-end reads that successfully merged with their
respective R1 pair but were not trimmed.
pattern: "*.collapsed.gz"
- collapsed_truncated:
type: file
description: |
Collapsed FastQ of paired-end reads that successfully merged with their
respective R1 pair and were trimmed of adapter due to sufficient overlap.
pattern: "*.collapsed.truncated.gz"
- log:
type: file
description: AdapterRemoval log file
@ -48,3 +81,4 @@ output:
authors:
- "@maxibor"
- "@jfy133"


@ -0,0 +1,42 @@
process BCFTOOLS_ANNOTATE {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::bcftools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/bcftools:1.15--haf5b3da_0':
'quay.io/biocontainers/bcftools:1.15--haf5b3da_0' }"
input:
tuple val(meta), path(input)
output:
tuple val(meta), path("*_annotated.vcf.gz"), optional:true , emit: vcf
tuple val(meta), path("*_annotated.bcf") , optional:true , emit: bcf
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def matcher = input ==~ /\S+\.*vcf\.\S*/
def output_suffix = matcher ? "vcf.gz" : "bcf"
def output_type_compressed = matcher ? "z" : "b"
"""
bcftools \\
annotate \\
$args \\
--output ${prefix}_annotated.${output_suffix} \\
--output-type $output_type_compressed \\
--threads $task.cpus \\
$input
cat <<-END_VERSIONS > versions.yml
"${task.process}":
bcftools: \$( bcftools --version |& sed '1!d; s/^.*bcftools //' )
END_VERSIONS
"""
}
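The output extension here is picked by the `==~` matcher above (a VCF-like input name yields `vcf.gz`, anything else `bcf`). A minimal sketch of how additional options could be supplied through `modules.config` — `--remove` is a standard `bcftools annotate` option, but the entry itself is illustrative:

```
// Illustrative modules.config entry for BCFTOOLS_ANNOTATE
process {
    withName: 'BCFTOOLS_ANNOTATE' {
        ext.args   = '--remove ID,INFO/DP'    // drop the ID column and the INFO/DP tag
        ext.prefix = { "${meta.id}.ann" }     // outputs become <id>.ann_annotated.vcf.gz / .bcf
    }
}
```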


@ -0,0 +1,45 @@
name: bcftools_annotate
description: Add or remove annotations.
keywords:
- bcftools
- annotate
- vcf
- remove
- add
tools:
- annotate:
description: Add or remove annotations.
homepage: http://samtools.github.io/bcftools/bcftools.html
documentation: https://samtools.github.io/bcftools/bcftools.html#annotate
doi: 10.1093/bioinformatics/btp352
licence: ['MIT']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- input:
type: files
description: Query VCF or BCF file, can be either uncompressed or compressed
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- vcf:
type: file
description: Compressed annotated VCF file
pattern: "*_annotated.vcf.gz"
- bcf:
type: file
description: Compressed annotated BCF file
pattern: "*_annotated.bcf"
authors:
- "@projectoriented"


@ -2,10 +2,10 @@ process BWA_MEM {
tag "$meta.id"
label 'process_high'
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null)
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }"
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }"
input:
tuple val(meta), path(reads)


@ -2,10 +2,10 @@ process BWA_SAMPE {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null)
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }"
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }"
input:
tuple val(meta), path(reads), path(sai)


@ -2,10 +2,10 @@ process BWA_SAMSE {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null)
conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }"
'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' :
'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }"
input:
tuple val(meta), path(reads), path(sai)


@ -2,10 +2,10 @@ process BWAMEM2_MEM {
tag "$meta.id"
label 'process_high'
conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.12" : null)
conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' :
'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' }"
'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' :
'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' }"
input:
tuple val(meta), path(reads)


@ -0,0 +1,158 @@
process CONTROLFREEC {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::control-freec=11.6" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1':
'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }"
input:
tuple val(meta), path(mpileup_normal), path(mpileup_tumor), path(cpn_normal), path(cpn_tumor), path(minipileup_normal), path(minipileup_tumor)
path fasta
path fai
path snp_position
path known_snps
path known_snps_tbi
path chr_directory
path mappability
path target_bed
path gccontent_profile
output:
tuple val(meta), path("*_ratio.BedGraph") , emit: bedgraph, optional: true
tuple val(meta), path("*_control.cpn") , emit: control_cpn
tuple val(meta), path("*_sample.cpn") , emit: sample_cpn
tuple val(meta), path("GC_profile.*.cpn") , emit: gcprofile_cpn, optional:true
tuple val(meta), path("*_BAF.txt") , emit: BAF
tuple val(meta), path("*_CNVs") , emit: CNV
tuple val(meta), path("*_info.txt") , emit: info
tuple val(meta), path("*_ratio.txt") , emit: ratio
tuple val(meta), path("config.txt") , emit: config
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
//"General" configurations
def bedgraphoutput = task.ext.args?["general"]?["bedgraphoutput"] ? "BedGraphOutput = ${task.ext.args["general"]["bedgraphoutput"]}" : ""
def chr_files = chr_directory ? "chrFiles = \${PWD}/${chr_directory}" : ""
def chr_length = fai ? "chrLenFile = \${PWD}/${fai}" : ""
def breakpointthreshold = task.ext.args?["general"]?["breakpointthreshold"] ? "breakPointThreshold = ${task.ext.args["general"]["breakpointthreshold"]}" : ""
def breakpointtype = task.ext.args?["general"]?["breakpointtype"] ? "breakPointType = ${task.ext.args["general"]["breakpointtype"]}" : ""
def coefficientofvariation = task.ext.args?["general"]?["coefficientofvariation"] ? "coefficientOfVariation = ${task.ext.args["general"]["coefficientofvariation"]}" : ""
def contamination = task.ext.args?["general"]?["contamination"] ? "contamination = ${task.ext.args["general"]["contamination"]}" : ""
def contaminationadjustment = task.ext.args?["general"]?["contaminationadjustment"] ? "contaminationAdjustment = ${task.ext.args["general"]["contaminationadjustment"]}" : ""
def degree = task.ext.args?["general"]?["degree"] ? "degree = ${task.ext.args["general"]["degree"]}" : ""
def forcegccontentnormalization = task.ext.args?["general"]?["forcegccontentnormalization"] ? "forceGCcontentNormalization = ${task.ext.args["general"]["forcegccontentnormalization"]}" : ""
def gccontentprofile = gccontent_profile ? "GCcontentProfile = ${gccontent_profile}" : ""
def mappability = mappability ? "gemMappabilityFile = \${PWD}/${mappability}" : ""
def intercept = task.ext.args?["general"]?["intercept"] ? "intercept = ${task.ext.args["general"]["intercept"]}" : ""
def mincnalength = task.ext.args?["general"]?["mincnalength"] ? "minCNAlength = ${task.ext.args["general"]["mincnalength"]}" : ""
def minmappabilityperwindow = task.ext.args?["general"]?["minmappabilityperwindow"] ? "minMappabilityPerWindow = ${task.ext.args["general"]["minmappabilityperwindow"]}" : ""
def minexpectedgc = task.ext.args?["general"]?["minexpectedgc"] ? "minExpectedGC = ${task.ext.args["general"]["minexpectedgc"]}" : ""
def maxexpectedgc = task.ext.args?["general"]?["maxexpectedgc"] ? "maxExpectedGC = ${task.ext.args["general"]["maxexpectedgc"]}" : ""
def minimalsubclonepresence = task.ext.args?["general"]?["minimalsubclonepresence"] ? "minimalSubclonePresence = ${task.ext.args["general"]["minimalsubclonepresence"]}" : ""
def noisydata = task.ext.args?["general"]?["noisydata"] ? "noisyData = ${task.ext.args["general"]["noisydata"]}" : ""
def output = task.ext.prefix ? "outputDir = \${PWD}/${task.ext.prefix}" : ""
def ploidy = task.ext.args?["general"]?["ploidy"] ? "ploidy = ${task.ext.args["general"]["ploidy"]}" : ""
def printNA = task.ext.args?["general"]?["printNA"] ? "printNA = ${task.ext.args["general"]["printNA"]}" : ""
def readcountthreshold = task.ext.args?["general"]?["readcountthreshold"] ? "readCountThreshold = ${task.ext.args["general"]["readcountthreshold"]}" : ""
def sex = task.ext.args?["general"]?["sex"] ? "sex = ${task.ext.args["general"]["sex"]}" : ""
def step = task.ext.args?["general"]?["step"] ? "step = ${task.ext.args["general"]["step"]}" : ""
def telocentromeric = task.ext.args?["general"]?["telocentromeric"] ? "telocentromeric = ${task.ext.args["general"]["telocentromeric"]} " : ""
def uniquematch = task.ext.args?["general"]?["uniquematch"] ? "uniqueMatch = ${task.ext.args["general"]["uniquematch"]}" : ""
def window = task.ext.args?["general"]?["window"] ? "window = ${task.ext.args["general"]["window"]}" : ""
//"Control" configurations
def matefile_normal = mpileup_normal ? "mateFile = \${PWD}/${mpileup_normal}" : ""
def matecopynumberfile_normal = cpn_normal ? "mateCopyNumberFile = \${PWD}/${cpn_normal}" : ""
def minipileup_normal = minipileup_normal ? "miniPileup = \${PWD}/${minipileup_normal}" : ""
def inputformat_normal = task.ext.args?["control"]?["inputformat"] ? "inputFormat = ${task.ext.args["control"]["inputformat"]}" : ""
def mateorientation_normal = task.ext.args?["control"]?["mateorientation"] ? "mateOrientation = ${task.ext.args["control"]["mateorientation"]}" : ""
//"Sample" configuration
def matefile_tumor = mpileup_tumor ? "mateFile = \${PWD}/${mpileup_tumor}" : ""
def matecopynumberfile_tumor = cpn_tumor ? "mateCopyNumberFile = \${PWD}/${cpn_tumor}" : ""
def minipileup_tumor = minipileup_tumor ? "miniPileup = \${PWD}/${minipileup_tumor}" : ""
def inputformat_tumor = task.ext.args?["sample"]?["inputformat"] ? "inputFormat = ${task.ext.args["sample"]["inputformat"]}" : ""
def mateorientation_tumor = task.ext.args?["sample"]?["mateorientation"] ? "mateOrientation = ${task.ext.args["sample"]["mateorientation"]}" : ""
//"BAF" configuration
def makepileup = snp_position ? "makePileup = \${PWD}/${snp_position}" : ""
def fastafile = fasta ? "fastaFile = \${PWD}/${fasta}" : ""
def minimalcoverageperposition = task.ext.args?["BAF"]?["minimalcoverageperposition"] ? "minimalCoveragePerPosition = ${task.ext.args["BAF"]["minimalcoverageperposition"]}" : ""
def minimalqualityperposition = task.ext.args?["BAF"]?["minimalqualityperposition"] ? "minimalQualityPerPosition = ${task.ext.args["BAF"]["minimalqualityperposition"]}" : ""
def shiftinquality = task.ext.args?["BAF"]?["shiftinquality"] ? "shiftInQuality = ${task.ext.args["BAF"]["shiftinquality"]}" : ""
def snpfile = known_snps ? "SNPfile = \$PWD/${known_snps}" : ""
//"Target" configuration
def target_bed = target_bed ? "captureRegions = ${target_bed}" : ""
"""
touch config.txt
echo "[general]" >> config.txt
echo ${bedgraphoutput} >> config.txt
echo ${breakpointthreshold} >> config.txt
echo ${breakpointtype} >> config.txt
echo ${chr_files} >> config.txt
echo ${chr_length} >> config.txt
echo ${coefficientofvariation} >> config.txt
echo ${contamination} >> config.txt
echo ${contaminationadjustment} >> config.txt
echo ${degree} >> config.txt
echo ${forcegccontentnormalization} >> config.txt
echo ${gccontentprofile} >> config.txt
echo ${mappability} >> config.txt
echo ${intercept} >> config.txt
echo ${mincnalength} >> config.txt
echo ${minmappabilityperwindow} >> config.txt
echo ${minexpectedgc} >> config.txt
echo ${maxexpectedgc} >> config.txt
echo ${minimalsubclonepresence} >> config.txt
echo "maxThreads = ${task.cpus}" >> config.txt
echo ${noisydata} >> config.txt
echo ${output} >> config.txt
echo ${ploidy} >> config.txt
echo ${printNA} >> config.txt
echo ${readcountthreshold} >> config.txt
echo ${sex} >> config.txt
echo ${step} >> config.txt
echo ${telocentromeric} >> config.txt
echo ${uniquematch} >> config.txt
echo ${window} >> config.txt
echo "[control]" >> config.txt
echo ${matefile_normal} >> config.txt
echo ${matecopynumberfile_normal} >> config.txt
echo ${minipileup_normal} >> config.txt
echo ${inputformat_normal} >> config.txt
echo ${mateorientation_normal} >> config.txt
echo "[sample]" >> config.txt
echo ${matefile_tumor} >> config.txt
echo ${matecopynumberfile_tumor} >> config.txt
echo ${minipileup_tumor} >> config.txt
echo ${inputformat_tumor} >> config.txt
echo ${mateorientation_tumor} >> config.txt
echo "[BAF]" >> config.txt
echo ${makepileup} >> config.txt
echo ${fastafile} >> config.txt
echo ${minimalcoverageperposition} >> config.txt
echo ${minimalqualityperposition} >> config.txt
echo ${shiftinquality} >> config.txt
echo ${snpfile} >> config.txt
echo "[target]" >> config.txt
echo ${target_bed} >> config.txt
freec -conf config.txt
cat <<-END_VERSIONS > versions.yml
"${task.process}":
controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" )
END_VERSIONS
"""
}
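CONTROLFREEC reads all of its settings from a nested `task.ext.args` map (documented in the meta.yml below). A minimal sketch of supplying that map via `modules.config`, assuming pileup input; every key and value shown is illustrative:

```
// Illustrative modules.config entry; keys mirror the [general]/[control]/[sample]/[BAF] sections above
process {
    withName: 'CONTROLFREEC' {
        ext.args = { [
            "general": [ "ploidy": "2", "window": "50000" ],
            "control": [ "inputformat": "pileup", "mateorientation": "FR" ],
            "sample" : [ "inputformat": "pileup", "mateorientation": "FR" ],
            "BAF"    : [ "minimalcoverageperposition": "5" ]
        ] }
    }
}
```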


@ -0,0 +1,183 @@
name: controlfreec
description: Copy number and genotype annotation from whole genome and whole exome sequencing data
keywords:
- cna
- cnv
- somatic
- single
- tumor-only
tools:
- controlfreec:
description: Copy number and genotype annotation from whole genome and whole exome sequencing data.
homepage: http://boevalab.inf.ethz.ch/FREEC
documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html
tool_dev_url: https://github.com/BoevaLab/FREEC/
doi: "10.1093/bioinformatics/btq635"
licence: ['GPL >=2']
input:
- args:
type: map
description: |
Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config.
<optional> parameters can be removed from the map if they are not set. All values must be surrounded by quotes; meta map parameters can be set directly, e.g. sex = meta.sex.
For default values, please check the documentation above.
```
{
[
"general" :[
"bedgraphoutput": <optional>,
"breakpointthreshold": <optional>,
"breakpointtype": <optional>,
"coefficientofvariation": <optional>,
"contamination": <optional>,
"contaminationadjustment": <optional>,
"degree": <optional>,
"forcegccontentnormalization": <optional>,
"gccontentprofile": <optional>,
"intercept": <optional>,
"mincnalength": <optional>,
"minmappabilityperwindow": <optional>,
"minexpectedgc": <optional>,
"maxexpectedgc": <optional>,
"minimalsubclonepresence": <optional>,
"noisydata": <optional>,
"ploidy": <optional>,
"printNA": <optional>,
"readcountthreshold": <optional >,
"sex": <optional>,
"step": <optional value>,
"telocentromeric": <optional>,
"uniquematch": <optional>,
"window": <optional>
],
"control":[
"inputformat": <required>,
"mateorientation": <optional>,
],
"sample":[
"inputformat": <required>,
"mateorientation": <optional>,
],
"BAF":[
"minimalcoverageperposition": <optional>,
"minimalqualityperposition": <optional>,
"shiftinquality": <optional>
]
]
}
```
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- mateFile_normal:
type: file
description: File with mapped reads
pattern: "*.{sam,bam,pileup(.gz),bowtie(.gz),eland(.gz),arachne(.gz),psl(.gz),bed(.gz)}"
- mateFile_tumor:
type: file
description: File with mapped reads
pattern: "*.{sam,bam,pileup(.gz),bowtie(.gz),eland(.gz),arachne(.gz),psl(.gz),bed(.gz)}"
- cpn_normal:
type: file
description: Raw copy number profiles (optional)
pattern: "*.cpn"
- cpn_tumor:
type: file
description: Raw copy number profiles (optional)
pattern: "*.cpn"
- minipileup_normal:
type: file
description: miniPileup file from previous run (optional)
pattern: "*.pileup"
- minipileup_tumor:
type: file
description: miniPileup file from previous run (optional)
pattern: "*.pileup"
- fasta:
type: file
description: Reference file (optional; required if args 'makePileup' is set)
pattern: "*.{fasta,fna,fa}"
- fai:
type: file
description: Fasta index
pattern: "*.fai"
- snp_position:
type: file
description: File with SNP positions used to create a mini-pileup (required when makePileup is set)
pattern: "*.{}"
- known_snps:
type: file
description: File with known SNPs
pattern: "*.{vcf,vcf.gz}"
- known_snps_tbi:
type: file
description: Index of known_snps
pattern: "*.tbi"
- chr_directory:
type: file
description: Path to directory with chromosome fasta files (optional, required if gccontentprofile is not provided)
pattern: "*/"
- mappability:
type: file
description: Contains information of mappable positions (optional)
pattern: "*.gem"
- target_bed:
type: file
description: Sorted bed file containing capture regions (optional)
pattern: "*.bed"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- bedgraph:
type: file
description: Bedgraph format for the UCSC genome browser
pattern: ".bedgraph"
- control_cpn:
type: file
description: files with raw copy number profiles
pattern: "*_control.cpn"
- sample_cpn:
type: file
description: files with raw copy number profiles
pattern: "*_sample.cpn"
- gcprofile_cpn:
type: file
description: file with GC-content profile.
pattern: "GC_profile.*.cpn"
- BAF:
type: file
description: file with B-allele frequencies for each possibly heterozygous SNP position
pattern: "*_BAF.txt"
- CNV:
type: file
description: file with coordinates of predicted copy number alterations.
pattern: "*_CNVs"
- info:
type: file
description: parsable file with information about FREEC run
pattern: "*_info.txt"
- ratio:
type: file
description: file with ratios and predicted copy number alterations for each window
pattern: "*_ratio.txt"
- config:
type: file
description: Config file used to run Control-FREEC
pattern: "config.txt"
authors:
- "@FriederikeHanssen"


@ -2,10 +2,10 @@ process CUSTOM_GETCHROMSIZES {
tag "$fasta"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
path fasta


@ -17,8 +17,8 @@ process DEEPVARIANT {
path(fai)
output:
tuple val(meta), path("*.vcf.gz") , emit: vcf
tuple val(meta), path("*g.vcf.gz"), emit: gvcf
tuple val(meta), path("${prefix}.vcf.gz") , emit: vcf
tuple val(meta), path("${prefix}.g.vcf.gz"), emit: gvcf
path "versions.yml" , emit: versions
when:
@ -26,7 +26,7 @@ process DEEPVARIANT {
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
prefix = task.ext.prefix ?: "${meta.id}"
def regions = intervals ? "--regions ${intervals}" : ""
"""


@ -13,6 +13,7 @@ process FAQCS {
output:
tuple val(meta), path('*.trimmed.fastq.gz') , emit: reads
tuple val(meta), path('*.stats.txt') , emit: stats
tuple val(meta), path('*.txt') , optional:true, emit: txt
tuple val(meta), path('*_qc_report.pdf') , optional:true, emit: statspdf
tuple val(meta), path('*.log') , emit: log
tuple val(meta), path('*.discard.fastq.gz') , optional:true, emit: reads_fail


@ -54,6 +54,10 @@ output:
type: file
description: trimming/qc text stats file
pattern: "*.stats.txt"
- txt:
type: file
description: trimming/qc text txt files from --debug option
pattern: "*.txt"
- statspdf:
type: file
description: trimming/qc pdf report file


@ -0,0 +1,44 @@
process HAMRONIZATION_DEEPARG {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::hamronization=1.0.3" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hamronization:1.0.3--py_0':
'quay.io/biocontainers/hamronization:1.0.3--py_0' }"
input:
tuple val(meta), path(report)
val(format)
val(software_version)
val(reference_db_version)
output:
tuple val(meta), path("*.json"), optional: true, emit: json
tuple val(meta), path("*.tsv") , optional: true, emit: tsv
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
hamronize \\
deeparg \\
${report} \\
$args \\
--format ${format} \\
--analysis_software_version ${software_version} \\
--reference_database_version ${reference_db_version} \\
--input_file_name ${prefix} \\
> ${prefix}.${format}
cat <<-END_VERSIONS > versions.yml
"${task.process}":
hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' )
END_VERSIONS
"""
}
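A minimal sketch of how the value inputs are supplied when calling this module from a workflow (the include path follows this repository's layout; the version strings and file names are placeholders):

```
// Illustrative usage of HAMRONIZATION_DEEPARG
include { HAMRONIZATION_DEEPARG } from './modules/hamronization/deeparg/main'

workflow {
    ch_reports = Channel.of( [ [ id:'sample1' ], file('sample1.mapping.ARG') ] )
    HAMRONIZATION_DEEPARG( ch_reports, 'tsv', '1.0.2', '2' )   // format, software_version, reference_db_version
}
```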


@ -0,0 +1,60 @@
name: hamronization_deeparg
description: Tool to convert and summarize DeepARG outputs using the hAMRonization specification
keywords:
- amr
- antimicrobial resistance
- reporting
- deeparg
tools:
- hamronization:
description: Tool to convert and summarize AMR gene detection outputs using the hAMRonization specification
homepage: https://github.com/pha4ge/hAMRonization/blob/master/README.md
documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md
tool_dev_url: https://github.com/pha4ge/hAMRonization
doi: ""
licence: ['GNU Lesser General Public v3 (LGPL v3)']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- report:
type: file
description: Output .mapping.ARG file from DeepARG
pattern: "*.mapping.ARG"
- format:
type: value
description: Type of report file to be produced
pattern: "tsv|json"
- software_version:
type: value
description: Version of DeepARG used
pattern: "[0-9].[0-9].[0-9]"
- reference_db_version:
type: value
description: Database version of DeepARG used
pattern: "[0-9]"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- json:
type: file
description: hAMRonised report in JSON format
pattern: "*.json"
- tsv:
type: file
description: hAMRonised report in TSV format
pattern: "*.json"
authors:
- "@jfy133"


@ -0,0 +1,38 @@
process HAMRONIZATION_SUMMARIZE {
label 'process_low'
conda (params.enable_conda ? "bioconda::hamronization=1.0.3" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hamronization:1.0.3--py_0':
'quay.io/biocontainers/hamronization:1.0.3--py_0' }"
input:
path(reports)
val(format)
output:
path("hamronization_combined_report.json"), optional: true, emit: json
path("hamronization_combined_report.tsv") , optional: true, emit: tsv
path("hamronization_combined_report.html"), optional: true, emit: html
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def outformat = format == 'interactive' ? 'html' : format
"""
hamronize \\
summarize \\
${reports.join(' ')} \\
-t ${format} \\
$args \\
-o hamronization_combined_report.${outformat}
cat <<-END_VERSIONS > versions.yml
"${task.process}":
hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' )
END_VERSIONS
"""
}
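Building on the sketch above, the per-sample reports can be collected into a single channel and combined; per the `outformat` logic, requesting `interactive` writes the combined HTML report (illustrative wiring):

```
// Illustrative: collect all per-sample TSV reports and build one interactive summary
HAMRONIZATION_SUMMARIZE(
    HAMRONIZATION_DEEPARG.out.tsv.map { meta, tsv -> tsv }.collect(),
    'interactive'   // written as hamronization_combined_report.html
)
```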


@ -0,0 +1,45 @@
name: hamronization_summarize
description: Tool to summarize and combine all hAMRonization reports into a single file
keywords:
- amr
- antimicrobial resistance
- reporting
tools:
- hamronization:
description: Tool to convert and summarize AMR gene detection outputs using the hAMRonization specification
homepage: https://github.com/pha4ge/hAMRonization/blob/master/README.md
documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md
tool_dev_url: https://github.com/pha4ge/hAMRonization
doi: ""
licence: ['GNU Lesser General Public v3 (LGPL v3)']
input:
- reports:
type: file
description: List of multiple hAMRonization reports in either JSON or TSV format
pattern: "*.{json,tsv}"
- format:
type: value
description: Type of final combined report file to be produced
pattern: "tsv|json|interactive"
output:
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- json:
type: file
description: hAMRonised summary in JSON format
pattern: "*.json"
- tsv:
type: file
description: hAMRonised summary in TSV format
pattern: "*.json"
- html:
type: file
description: hAMRonised summary in HTML format
pattern: "*.html"
authors:
- "@jfy133"


@ -0,0 +1,44 @@
def VERSION = '1.0.1' // Version information not provided by tool on CLI
process HPSUISSERO {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::hpsuissero=1.0.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hpsuissero%3A1.0.1--hdfd78af_0':
'quay.io/biocontainers/hpsuissero:1.0.1--hdfd78af_0' }"
input:
tuple val(meta), path(fasta)
output:
tuple val(meta), path("*.tsv"), emit: tsv
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def is_compressed = fasta.getName().endsWith(".gz") ? true : false
def fasta_name = fasta.getName().replace(".gz", "")
"""
if [ "$is_compressed" == "true" ]; then
gzip -c -d $fasta > $fasta_name
fi
HpsuisSero.sh \\
-i $fasta_name \\
-o ./ \\
-s $prefix \\
-x fasta \\
-t $task.cpus
cat <<-END_VERSIONS > versions.yml
"${task.process}":
hpsuissero: $VERSION
END_VERSIONS
"""
}


@ -0,0 +1,43 @@
name: hpsuissero
description: Serotype prediction of Haemophilus parasuis assemblies
keywords:
- bacteria
- fasta
- haemophilus
tools:
- hpsuissero:
description: Rapid Haemophilus parasuis serotyping pipeline for Nanopore data
homepage: https://github.com/jimmyliu1326/HpsuisSero
documentation: https://github.com/jimmyliu1326/HpsuisSero
tool_dev_url: https://github.com/jimmyliu1326/HpsuisSero
doi: ""
licence: ['MIT']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: Assembly in FASTA format
pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz,faa,faa.gz}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- tsv:
type: file
description: Tab-delimited serotype prediction
pattern: "*.{tsv}"
authors:
- "@rpetit3"


@ -61,11 +61,11 @@ output:
pattern: "*.r1.fail.fq.gz"
- unmerged_r2_fq_pass:
type: file
description: Passed unmerged R1 FASTQs
description: Passed unmerged R2 FASTQs
pattern: "*.r2.fq.gz"
- unmerged_r2_fq_fail:
type: file
description: Failed unmerged R1 FASTQs
description: Failed unmerged R2 FASTQs
pattern: "*.r2.fail.fq.gz"
- log:
type: file

modules/mafft/main.nf (new file, 35 lines)

@ -0,0 +1,35 @@
process MAFFT {
tag "$meta.id"
label 'process_high'
conda (params.enable_conda ? "bioconda::mafft=7.490" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mafft:7.490--h779adbc_0':
'quay.io/biocontainers/mafft:7.490--h779adbc_0' }"
input:
tuple val(meta), path(fasta)
output:
tuple val(meta), path("*.fas"), emit: fas
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
mafft \\
--thread ${task.cpus} \\
${args} \\
${fasta} \\
> ${prefix}.fas
cat <<-END_VERSIONS > versions.yml
"${task.process}":
mafft: \$(mafft --version 2>&1 | sed 's/^v//' | sed 's/ (.*)//')
END_VERSIONS
"""
}
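A minimal sketch of passing alignment options to this module through `modules.config`; `--auto` is a standard MAFFT flag that lets it choose a suitable strategy, while the config entry itself is illustrative:

```
// Illustrative modules.config entry for MAFFT
process {
    withName: 'MAFFT' {
        ext.args = '--auto'   // let MAFFT pick an appropriate alignment strategy
    }
}
```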

modules/mafft/meta.yml (new file, 42 lines)

@ -0,0 +1,42 @@
name: mafft
description: Multiple sequence alignment using MAFFT
keywords:
- msa
- multiple sequence alignment
tools:
- mafft:
description: Multiple alignment program for amino acid or nucleotide sequences based on fast Fourier transform
homepage: https://mafft.cbrc.jp/alignment/software/
documentation: https://mafft.cbrc.jp/alignment/software/manual/manual.html
tool_dev_url: https://mafft.cbrc.jp/alignment/software/source.html
doi: "10.1093/nar/gkf436"
licence: ['BSD']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: FASTA file containing the sequences to align
pattern: "*.{fa,fasta}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- fas:
type: file
description: Aligned sequences in FASTA format
pattern: "*.{fas}"
authors:
- "@MillironX"


@ -1,4 +1,5 @@
process MALT_RUN {
tag "$meta.id"
label 'process_high'
conda (params.enable_conda ? "bioconda::malt=0.53" : null)
@ -7,21 +8,22 @@ process MALT_RUN {
'quay.io/biocontainers/malt:0.53--hdfd78af_0' }"
input:
path fastqs
tuple val(meta), path(fastqs)
val mode
path index
output:
path "*.rma6" , emit: rma6
path "*.{tab,text,sam}", optional:true, emit: alignments
path "*.log" , emit: log
path "versions.yml" , emit: versions
tuple val(meta), path("*.rma6") , emit: rma6
tuple val(meta), path("*.{tab,text,sam}"), optional:true, emit: alignments
tuple val(meta), path("*.log") , emit: log
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def avail_mem = 6
if (!task.memory) {
log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.'
@ -38,7 +40,7 @@ process MALT_RUN {
$args \\
--inFile ${fastqs.join(' ')} \\
-m $mode \\
--index $index/ |&tee malt-run.log
--index $index/ |&tee ${prefix}-malt-run.log
cat <<-END_VERSIONS > versions.yml
"${task.process}":


@ -19,6 +19,11 @@ tools:
licence: ["GPL v3"]
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fastqs:
type: file
description: Input FASTQ files
@ -47,7 +52,7 @@ output:
- log:
type: file
description: Log of verbose MALT stdout
pattern: "malt-run.log"
pattern: "*-malt-run.log"
authors:
- "@jfy133"


@ -29,8 +29,9 @@ process MAXBIN2 {
def prefix = task.ext.prefix ?: "${meta.id}"
def associate_files = reads ? "-reads $reads" : "-abund $abund"
"""
mkdir input/ && mv $contigs input/
run_MaxBin.pl \\
-contig $contigs \\
-contig input/$contigs \\
$associate_files \\
-thread $task.cpus \\
$args \\


@ -1,6 +1,6 @@
process PICARD_CLEANSAM {
tag "$meta.id"
label 'process_low'
label 'process_medium'
conda (params.enable_conda ? "bioconda::picard=2.26.9" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
@ -8,10 +8,10 @@ process PICARD_CLEANSAM {
'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }"
input:
tuple val(meta), path(sam)
tuple val(meta), path(bam)
output:
tuple val(meta), path("*.sam"), emit: sam
tuple val(meta), path("*.bam"), emit: bam
path "versions.yml" , emit: versions
when:
@ -20,7 +20,6 @@ process PICARD_CLEANSAM {
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def STRINGENCY = task.ext.stringency ?: "STRICT"
def avail_mem = 3
if (!task.memory) {
log.info '[Picard CleanSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
@ -32,9 +31,8 @@ process PICARD_CLEANSAM {
-Xmx${avail_mem}g \\
CleanSam \\
${args} \\
-I ${sam} \\
-O ${prefix}.sam \\
--VALIDATION_STRINGENCY ${STRINGENCY}
-I ${bam} \\
-O ${prefix}.bam
cat <<-END_VERSIONS > versions.yml
"${task.process}":


@ -1,8 +1,7 @@
name: picard_cleansam
description: Cleans the provided SAM/BAM, soft-clipping beyond-end-of-reference alignments and setting MAPQ to 0 for unmapped reads
description: Cleans the provided BAM, soft-clipping beyond-end-of-reference alignments and setting MAPQ to 0 for unmapped reads
keywords:
- clean
- sam
- bam
tools:
- picard:
@ -22,8 +21,8 @@ input:
e.g. [ id:'test', single_end:false ]
- sam:
type: file
description: SAM file
pattern: "*.{sam}"
description: BAM file
pattern: "*.{bam}"
output:
- meta:
@ -37,8 +36,8 @@ output:
pattern: "versions.yml"
- sam:
type: file
description: Cleaned SAM file
pattern: "*.{sam}"
description: Cleaned BAM file
pattern: "*.{bam}"
authors:
- "@sateeshperi"


@ -0,0 +1,49 @@
process PICARD_SORTVCF {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::picard=2.26.10" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' :
'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }"
input:
tuple val(meta), path(vcf)
path reference
path sequence_dict
output:
tuple val(meta), path("*_sorted.vcf.gz"), emit: vcf
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def seq_dict = sequence_dict ? "-SEQUENCE_DICTIONARY $sequence_dict" : ""
def reference = reference ? "-REFERENCE_SEQUENCE $reference" : ""
def avail_mem = 3
if (!task.memory) {
log.info '[Picard SortVcf] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
} else {
avail_mem = task.memory.giga
}
"""
picard \\
SortVcf \\
-Xmx${avail_mem}g \\
--INPUT $vcf \\
$args \\
$seq_dict \\
$reference \\
--OUTPUT ${prefix}_sorted.vcf.gz
cat <<-END_VERSIONS > versions.yml
"${task.process}":
picard: \$(picard SortVcf --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:)
END_VERSIONS
"""
}
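The `-Xmx` value above comes from `task.memory.giga`, falling back to 3 GB when no memory directive is set. A minimal sketch of the matching resource configuration (values illustrative):

```
// Illustrative resource configuration; task.memory.giga above would resolve to 8
process {
    withName: 'PICARD_SORTVCF' {
        memory = 8.GB
        cpus   = 2
    }
}
```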


@ -0,0 +1,40 @@
name: picard_sortvcf
description: Sorts vcf files
keywords:
- sort
- vcf
tools:
- picard:
description: Java tools for working with NGS data in the BAM/CRAM/SAM and VCF format
homepage: https://broadinstitute.github.io/picard/
documentation: https://broadinstitute.github.io/picard/command-line-overview.html#SortVcf
licence: ['MIT']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- vcf:
type: file
description: VCF file
pattern: "*.{vcf,vcf.gz}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- vcf:
type: file
description: Sorted VCF file
pattern: "*.{vcf}"
authors:
- "@ramprasadn"


@ -23,11 +23,13 @@ process PLINK2_EXTRACT {
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
if( "$pgen" == "${prefix}.pgen" ) error "Input and output names are the same, use \"task.ext.prefix\" in modules.config to disambiguate!"
def mem_mb = task.memory.toMega()
"""
plink2 \\
--threads $task.cpus \\
--memory $mem_mb \\
--pfile ${pgen.baseName} \\
$args \\
--threads $task.cpus \\
--extract $variants \\
--make-pgen vzs \\
--out ${prefix}


@ -0,0 +1,39 @@
process PLINK2_SCORE {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1' :
'quay.io/biocontainers/plink2:2.00a2.3--h712d239_1' }"
input:
tuple val(meta), path(pgen), path(psam), path(pvar)
path(scorefile)
output:
tuple val(meta), path("*.sscore"), emit: score
path("versions.yml") , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def mem_mb = task.memory.toMega() // plink is greedy
"""
plink2 \\
--threads $task.cpus \\
--memory $mem_mb \\
--pfile ${pgen.baseName} vzs \\
--score ${scorefile} \\
$args \\
--out ${prefix}
cat <<-END_VERSIONS > versions.yml
"${task.process}":
plink2: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' )
END_VERSIONS
"""
}


@ -0,0 +1,56 @@
name: plink2_score
description: Apply a scoring system to each sample in a plink 2 fileset
keywords:
- plink2
- score
tools:
- plink2:
description: |
Whole genome association analysis toolset, designed to perform a range
of basic, large-scale analyses in a computationally efficient manner
homepage: http://www.cog-genomics.org/plink/2.0/
documentation: http://www.cog-genomics.org/plink/2.0/general_usage
tool_dev_url: None
doi: "10.1186/s13742-015-0047-8"
licence: ['GPL v3']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- pgen:
type: file
description: PLINK 2 binary genotype table
pattern: "*.{pgen}"
- psam:
type: file
description: PLINK 2 sample information file
pattern: "*.{psam}"
- pvar:
type: file
description: PLINK 2 variant information file
pattern: "*.{pvar}"
- scorefile:
type: file
description: A text file containing variant identifiers and weights
pattern: "*.{scores,txt,scorefile}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- score:
type: file
description: A text file containing sample scores, in plink 2 .sscore format
pattern: "*.{sscore}"
authors:
- "@nebfield"


@ -11,10 +11,10 @@ process PLINK2_VCF {
tuple val(meta), path(vcf)
output:
tuple val(meta), path("*.pgen"), emit: pgen
tuple val(meta), path("*.psam"), emit: psam
tuple val(meta), path("*.pvar"), emit: pvar
path "versions.yml" , emit: versions
tuple val(meta), path("*.pgen") , emit: pgen
tuple val(meta), path("*.psam") , emit: psam
tuple val(meta), path("*.pvar.zst"), emit: pvar
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
@ -22,10 +22,14 @@ process PLINK2_VCF {
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def mem_mb = task.memory.toMega()
"""
plink2 \\
--threads $task.cpus \\
--memory $mem_mb \\
$args \\
--vcf $vcf \\
--make-pgen vzs \\
--out ${prefix}
cat <<-END_VERSIONS > versions.yml


@ -46,7 +46,7 @@ output:
- pvar:
type: file
description: PLINK 2 variant information file
pattern: "*.{psam}"
pattern: "*.{pvar.zst}"
authors:
- "@nebfield"


@ -2,10 +2,10 @@ process QUALIMAP_BAMQCCRAM {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.12" : null)
conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:4bf11d12f2c3eccf1eb585097c0b6fd31c18c418-0' :
'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:4bf11d12f2c3eccf1eb585097c0b6fd31c18c418-0' }"
'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' :
'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' }"
input:
tuple val(meta), path(cram), path(crai)


@ -2,10 +2,10 @@ process SAMTOOLS_AMPLICONCLIP {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_BAM2FQ {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(inputbam)


@ -2,10 +2,10 @@ process SAMTOOLS_DEPTH {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_FAIDX {
tag "$fasta"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(fasta)


@ -2,10 +2,10 @@ process SAMTOOLS_FASTQ {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_FIXMATE {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_FLAGSTAT {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam), path(bai)


@ -2,10 +2,10 @@ process SAMTOOLS_IDXSTATS {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam), path(bai)


@ -2,10 +2,10 @@ process SAMTOOLS_INDEX {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(input)


@ -2,10 +2,10 @@ process SAMTOOLS_MERGE {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(input_files)


@ -2,10 +2,10 @@ process SAMTOOLS_MPILEUP {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_SORT {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(bam)


@ -2,10 +2,10 @@ process SAMTOOLS_STATS {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(input), path(input_index)


@ -2,10 +2,10 @@ process SAMTOOLS_VIEW {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
conda (params.enable_conda ? "bioconda::samtools=1.15" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' :
'quay.io/biocontainers/samtools:1.14--hb421002_0' }"
'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' :
'quay.io/biocontainers/samtools:1.15--h1170115_1' }"
input:
tuple val(meta), path(input)

modules/ssuissero/main.nf (new file, 44 lines)

@ -0,0 +1,44 @@
def VERSION = '1.0.1' // Version information not provided by tool on CLI
process SSUISSERO {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::ssuissero=1.0.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/ssuissero%3A1.0.1--hdfd78af_0':
'quay.io/biocontainers/ssuissero:1.0.1--hdfd78af_0' }"
input:
tuple val(meta), path(fasta)
output:
tuple val(meta), path("*.tsv"), emit: tsv
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def is_compressed = fasta.getName().endsWith(".gz") ? true : false
def fasta_name = fasta.getName().replace(".gz", "")
"""
if [ "$is_compressed" == "true" ]; then
gzip -c -d $fasta > $fasta_name
fi
SsuisSero.sh \\
-i $fasta_name \\
-o ./ \\
-s $prefix \\
-x fasta \\
-t $task.cpus
cat <<-END_VERSIONS > versions.yml
"${task.process}":
ssuissero: $VERSION
END_VERSIONS
"""
}


@ -0,0 +1,43 @@
name: ssuissero
description: Serotype prediction of Streptococcus suis assemblies
keywords:
- bacteria
- fasta
- streptococcus
tools:
- ssuissero:
description: Rapid Streptococcus suis serotyping pipeline for Nanopore Data
homepage: https://github.com/jimmyliu1326/SsuisSero
documentation: https://github.com/jimmyliu1326/SsuisSero
tool_dev_url: https://github.com/jimmyliu1326/SsuisSero
doi: ""
licence: ['MIT']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- fasta:
type: file
description: Assembly in FASTA format
pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz,faa,faa.gz}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- tsv:
type: file
description: Tab-delimited serotype prediction
pattern: "*.{tsv}"
authors:
- "@rpetit3"

modules/stranger/main.nf Normal file
View file

@ -0,0 +1,33 @@
process STRANGER {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "bioconda::stranger=0.8.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/stranger:0.8.1--pyh5e36f6f_0':
'quay.io/biocontainers/stranger:0.8.1--pyh5e36f6f_0' }"
input:
tuple val(meta), path(vcf)
output:
tuple val(meta), path("*.gz"), emit: vcf
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
stranger \\
$args \\
$vcf | gzip --no-name > ${prefix}.vcf.gz
cat <<-END_VERSIONS > versions.yml
"${task.process}":
stranger: \$( stranger --version )
END_VERSIONS
"""
}
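Likewise, a minimal sketch of a test workflow invoking the module above; the stranger test files themselves are not part of this diff, and the choice of the justhusky_minimal VCF fixture from the shared test-data config is an assumption for illustration only:

#!/usr/bin/env nextflow
nextflow.enable.dsl = 2

include { STRANGER } from '../../../modules/stranger/main.nf'

// Illustrative sketch only -- not part of this commit. The VCF fixture below is
// an assumption taken from the shared test-data config shown later in this diff.
workflow test_stranger {
    input = [
        [ id:'test', single_end:false ], // meta map
        file(params.test_data['homo_sapiens']['genome']['justhusky_minimal_vcf_gz'], checkIfExists: true)
    ]

    STRANGER ( input )
}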

modules/stranger/meta.yml Normal file
View file

@ -0,0 +1,44 @@
name: stranger
description: Annotates output files from ExpansionHunter with the pathologic implications of the repeat sizes.
keywords:
- STR
- repeat_expansions
- annotate
- vcf
tools:
- stranger:
description: Annotate VCF files with STR variants
homepage: https://github.com/moonso/stranger
documentation: https://github.com/moonso/stranger
tool_dev_url: https://github.com/moonso/stranger
doi: "10.5281/zenodo.4548873"
licence: ['MIT']
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- vcf:
type: file
description: VCF with repeat expansions
pattern: "*.{vcf.gz,vcf}"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- vcf:
type: file
description: Annotated VCF with keys STR_STATUS, NormalMax and PathologicMin
pattern: "*.{vcf.gz}"
authors:
- "@ljmesi"

View file

@ -2,7 +2,7 @@ process UNTAR {
tag "$archive"
label 'process_low'
conda (params.enable_conda ? "conda-forge::sed=4.7" : null)
conda (params.enable_conda ? "conda-forge::tar=1.32" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :
'biocontainers/biocontainers:v1.2.0_cv1' }"

View file

@ -13,6 +13,7 @@ process YARA_MAPPER {
output:
tuple val(meta), path("*.mapped.bam"), emit: bam
tuple val(meta), path("*.mapped.bam.bai"), emit: bai
path "versions.yml" , emit: versions
when:
@ -28,7 +29,9 @@ process YARA_MAPPER {
-t $task.cpus \\
-f bam \\
${index}/yara \\
$reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam
$reads | samtools view -@ $task.cpus -hb -F4 | samtools sort -@ $task.cpus > ${prefix}.mapped.bam
samtools index -@ $task.cpus ${prefix}.mapped.bam
cat <<-END_VERSIONS > versions.yml
"${task.process}":
@ -46,8 +49,11 @@ process YARA_MAPPER {
${reads[0]} \\
${reads[1]} > output.bam
samtools view -@ $task.cpus -hF 4 -f 0x40 -b output.bam > ${prefix}_1.mapped.bam
samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam
samtools view -@ $task.cpus -hF 4 -f 0x40 -b output.bam | samtools sort -@ $task.cpus > ${prefix}_1.mapped.bam
samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam | samtools sort -@ $task.cpus > ${prefix}_2.mapped.bam
samtools index -@ $task.cpus ${prefix}_1.mapped.bam
samtools index -@ $task.cpus ${prefix}_2.mapped.bam
cat <<-END_VERSIONS > versions.yml
"${task.process}":

View file

@ -45,6 +45,10 @@ output:
type: file
description: Sorted BAM file
pattern: "*.{bam}"
- bai:
type: file
description: Sorted BAM file index
pattern: "*.{bai}"
authors:
- "@apeltzer"

View file

@ -98,6 +98,10 @@ bbmap/index:
- modules/bbmap/index/**
- tests/modules/bbmap/index/**
bcftools/annotate:
- modules/bcftools/annotate/**
- tests/modules/bcftools/annotate/**
bcftools/concat:
- modules/bcftools/concat/**
- tests/modules/bcftools/concat/**
@ -352,6 +356,10 @@ cnvkit/batch:
- modules/cnvkit/batch/**
- tests/modules/cnvkit/batch/**
controlfreec:
- modules/controlfreec/**
- tests/modules/controlfreec/**
cooler/cload:
- modules/cooler/cload/**
- tests/modules/cooler/cload/**
@ -744,6 +752,14 @@ gunzip:
- modules/gunzip/**
- tests/modules/gunzip/**
hamronization/deeparg:
- modules/hamronization/deeparg/**
- tests/modules/hamronization/deeparg/**
hamronization/summarize:
- modules/hamronization/summarize/**
- tests/modules/hamronization/summarize/**
hicap:
- modules/hicap/**
- tests/modules/hicap/**
@ -807,6 +823,10 @@ homer/makeucscfile:
- modules/homer/makeucscfile/**
- tests/modules/homer/makeucscfile/**
hpsuissero:
- modules/hpsuissero/**
- tests/modules/hpsuissero/**
ichorcna/createpon:
- modules/ichorcna/createpon/**
- tests/modules/ichorcna/createpon/**
@ -960,6 +980,10 @@ macs2/callpeak:
- modules/macs2/callpeak/**
- tests/modules/macs2/callpeak/**
mafft:
- modules/mafft/**
- tests/modules/mafft/**
malt/build:
- modules/malt/build/**
- tests/modules/malt/build_test/**
@ -1225,6 +1249,10 @@ picard/sortsam:
- modules/picard/sortsam/**
- tests/modules/picard/sortsam/**
picard/sortvcf:
- modules/picard/sortvcf/**
- tests/modules/picard/sortvcf/**
pirate:
- modules/pirate/**
- tests/modules/pirate/**
@ -1245,6 +1273,10 @@ plink2/extract:
- modules/plink2/extract/**
- tests/modules/plink2/extract/**
plink2/score:
- modules/plink2/score/**
- tests/modules/plink2/score/**
plink2/vcf:
- modules/plink2/vcf/**
- tests/modules/plink2/vcf/**
@ -1533,6 +1565,10 @@ sratools/prefetch:
- modules/sratools/prefetch/**
- tests/modules/sratools/prefetch/**
ssuissero:
- modules/ssuissero/**
- tests/modules/ssuissero/**
staphopiasccmec:
- modules/staphopiasccmec/**
- tests/modules/staphopiasccmec/**
@ -1545,6 +1581,10 @@ star/genomegenerate:
- modules/star/genomegenerate/**
- tests/modules/star/genomegenerate/**
stranger:
- modules/stranger/**
- tests/modules/stranger/**
strelka/germline:
- modules/strelka/germline/**
- tests/modules/strelka/germline/**

View file

@ -67,6 +67,8 @@ params {
test_computematrix_mat_gz = "${test_data_dir}/genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz"
test_bcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.bcf"
test_vcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf"
test_vcf_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz"
test_vcf_gz_tbi = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz.tbi"
@ -117,14 +119,16 @@ params {
genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi"
transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta"
genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta"
genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz"
genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz"
genome_21_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta"
genome_21_fasta_fai = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta.fai"
genome_21_dict = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.dict"
genome_21_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.sizes"
genome_21_interval_list = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.interval_list"
genome_21_multi_interval_bed = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed"
genome_21_multi_interval_bed_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed.gz"
genome_21_multi_interval_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed.gz.tbi"
genome_21_chromosomes_dir = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/chromosomes.tar.gz"
dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz"
dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi"
@ -134,6 +138,7 @@ params {
mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi"
syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz"
syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi"
syntheticvcf_short_score = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.score"
gnomad_r2_1_1_sv_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1-sv.vcf.gz"
hapmap_3_3_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz"
@ -154,7 +159,7 @@ params {
justhusky_ped = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky.ped"
justhusky_minimal_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz"
justhusky_minimal_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz.tbi"
vcfanno_tar_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/vcfanno/vcfanno_grch38_module_test.tar.gz"
vcfanno_toml = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/vcfanno/vcfanno.toml"
}
@ -270,6 +275,9 @@ params {
test_genome21_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.genome_21.somatic_sv.vcf.gz"
test_genome21_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.genome_21.somatic_sv.vcf.gz.tbi"
test_mpileup = "${test_data_dir}/genomics/homo_sapiens/illumina/mpileup/test.mpileup.gz"
test2_mpileup = "${test_data_dir}/genomics/homo_sapiens/illumina/mpileup/test2.mpileup.gz"
test_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test.broadPeak"
test2_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test2.broadPeak"
@ -314,6 +322,8 @@ params {
'genome' {
genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz"
genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf"
genome_mapping_potential_arg = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.mapping.potential.ARG"
}
'illumina' {
test1_contigs_fa_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz"

View file

@ -1,31 +1,44 @@
- name: adapterremoval test_adapterremoval_single_end
command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_single_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config
command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_single_end -c tests/config/nextflow.config
tags:
- adapterremoval
files:
- path: output/adapterremoval/test.discarded.gz
- path: output/adapterremoval/test.log
md5sum: 2fd3d5d703b63ba33a83021fccf25f77
- path: output/adapterremoval/test.trimmed.fastq.gz
- path: output/adapterremoval/test.truncated.gz
md5sum: 62139afee94defad5b83bdd0b8475a1f
- path: output/adapterremoval/versions.yml
md5sum: ac5b46719719b7ee62739530b80869fc
- name: adapterremoval test_adapterremoval_paired_end
command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config
command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c tests/config/nextflow.config
tags:
- adapterremoval
files:
- path: output/adapterremoval/test.discarded.gz
- path: output/adapterremoval/test.log
md5sum: b8a451d3981b327f3fdb44f40ba2d6d1
- path: output/adapterremoval/test.pair1.trimmed.fastq.gz
- path: output/adapterremoval/test.pair1.truncated.gz
md5sum: 294a6277f0139bd597e57c6fa31f39c7
- path: output/adapterremoval/test.pair2.trimmed.fastq.gz
- path: output/adapterremoval/test.pair2.truncated.gz
md5sum: de7b38e2c881bced8671acb1ab452d78
- path: output/adapterremoval/test.singleton.truncated.gz
- path: output/adapterremoval/versions.yml
md5sum: fa621c887897da5a379c719399c17db7
- name: adapterremoval test_adapterremoval_paired_end_collapse
command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config
command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config
tags:
- adapterremoval
files:
- path: output/adapterremoval/test.discarded.gz
- path: output/adapterremoval/test.log
md5sum: 7f0b2328152226e46101a535cce718b3
- path: output/adapterremoval/test.merged.fastq.gz
md5sum: 07a8f725bfd3ecbeabdc41b32d898dee
md5sum: b8a451d3981b327f3fdb44f40ba2d6d1
- path: output/adapterremoval/test.pair1.truncated.gz
md5sum: 294a6277f0139bd597e57c6fa31f39c7
- path: output/adapterremoval/test.pair2.truncated.gz
md5sum: de7b38e2c881bced8671acb1ab452d78
- path: output/adapterremoval/test.singleton.truncated.gz
- path: output/adapterremoval/versions.yml
md5sum: fd428f92a8446e0b34c5ae1c447215b8

View file

@ -0,0 +1,23 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { BCFTOOLS_ANNOTATE } from '../../../../modules/bcftools/annotate/main.nf'
workflow test_bcftools_annotate_out_vcf {
input = [
[ id:'test_compressed_vcf', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ]
BCFTOOLS_ANNOTATE ( input )
}
workflow test_bcftools_annotate_out_bcf {
input = [
[ id:'test_compressed_bcf', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_bcf'], checkIfExists: true) ]
BCFTOOLS_ANNOTATE ( input )
}

View file

@ -0,0 +1,5 @@
process {
ext.args = "-x ID,INFO/DP,FORMAT/DP"
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@ -0,0 +1,19 @@
- name: bcftools annotate test_bcftools_annotate_out_vcf
command: nextflow run tests/modules/bcftools/annotate -entry test_bcftools_annotate_out_vcf -c tests/config/nextflow.config
tags:
- bcftools/annotate
- bcftools
files:
- path: output/bcftools/test_compressed_vcf_annotated.vcf.gz
- path: output/bcftools/versions.yml
md5sum: de86d4d411baef1aaee0e72f519dbe1f
- name: bcftools annotate test_bcftools_annotate_out_bcf
command: nextflow run tests/modules/bcftools/annotate -entry test_bcftools_annotate_out_bcf -c tests/config/nextflow.config
tags:
- bcftools/annotate
- bcftools
files:
- path: output/bcftools/test_compressed_bcf_annotated.bcf
- path: output/bcftools/versions.yml
md5sum: a57e62a5a189fe85aabd52c010d88ca6

View file

@ -5,4 +5,4 @@
- bwa/sampe
files:
- path: output/bwa/test.bam
md5sum: f6ad85d66d44c5d26e692109d2e34100
md5sum: 01d1d71c88b6de07ed51d1d06e9e970b

View file

@ -5,4 +5,4 @@
- bwa/samse
files:
- path: output/bwa/test.bam
md5sum: 27eb91146e45dee65664c18596be4262
md5sum: ddfa4a8f6b65d44704a2d9528abc7e79

View file

@ -0,0 +1,37 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { CONTROLFREEC } from '../../../modules/controlfreec/main.nf'
include { UNTAR } from '../../../modules/untar/main.nf'
workflow test_controlfreec {
input = [
[ id:'test', single_end:false, sex:'XX' ], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true),
file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true),
[],[],[],[]
]
fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true)
fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true)
dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true)
dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true)
chrfiles = file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true)
target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true)
UNTAR(chrfiles)
CONTROLFREEC ( input,
fasta,
fai,
[],
dbsnp,
dbsnp_tbi,
UNTAR.out.untar,
[],
target_bed,
[]
)
}

View file

@ -0,0 +1,26 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
withName:CONTROLFREEC{
ext.args = { [
"sample":[
inputformat: 'pileup',
mateorientation: 'FR'
],
"general" :[
bedgraphoutput: "TRUE",
noisydata: "TRUE",
minexpectedgc: "0",
readcountthreshold: "1",
sex: meta.sex,
window: "10",
],
"control":[
inputformat: "pileup",
mateorientation: "FR"
]
]
}
}
}

View file

@ -0,0 +1,22 @@
- name: controlfreec test_controlfreec
command: nextflow run tests/modules/controlfreec -entry test_controlfreec -c tests/config/nextflow.config
tags:
- controlfreec
files:
- path: output/controlfreec/config.txt
- path: output/controlfreec/test.mpileup.gz_control.cpn
md5sum: 1768b571677c418560e5a8fe203bdc79
- path: output/controlfreec/test2.mpileup.gz_BAF.txt
md5sum: 3bb7437001cf061a77eaf87b8558c48d
- path: output/controlfreec/test2.mpileup.gz_CNVs
md5sum: 1f4f5834dbd1490afdb22f6d3091c4c9
- path: output/controlfreec/test2.mpileup.gz_info.txt
md5sum: 1a3055d35028525ccc9e693cc9f335e0
- path: output/controlfreec/test2.mpileup.gz_ratio.BedGraph
md5sum: 8ba455b232be20cdcc5bf1e4035e8032
- path: output/controlfreec/test2.mpileup.gz_ratio.txt
md5sum: b76b2434de710325069e37fb1e132760
- path: output/controlfreec/test2.mpileup.gz_sample.cpn
md5sum: c80dad58a77b1d7ba6d273999f4b4b4b
- path: output/controlfreec/versions.yml
md5sum: ff93f6466d4686aab708425782c6c848

View file

@ -1,5 +1,6 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
ext.args = {"--debug" }
}

View file

@ -3,8 +3,20 @@
tags:
- faqcs
files:
- path: output/faqcs/qa.test.base_content.txt
md5sum: f992603f01ca430c03c8aae02eba2f5d
- path: output/faqcs/qa.test.for_qual_histogram.txt
md5sum: a3d462ab84151e982f99f85f52c21de3
- path: output/faqcs/qa.test.length_count.txt
md5sum: 80915f09fbaf5884c32e95acab2d031c
- path: output/faqcs/test.base_content.txt
md5sum: f992603f01ca430c03c8aae02eba2f5d
- path: output/faqcs/test.fastp.log
md5sum: be79dc893f87de1f82faf749cdfb848c
- path: output/faqcs/test.for_qual_histogram.txt
md5sum: a3d462ab84151e982f99f85f52c21de3
- path: output/faqcs/test.length_count.txt
md5sum: 80915f09fbaf5884c32e95acab2d031c
- path: output/faqcs/test.stats.txt
md5sum: ea20e93706b2e4c676004253baa3cec6
- path: output/faqcs/test.trimmed.fastq.gz
@ -18,8 +30,20 @@
tags:
- faqcs
files:
- path: output/faqcs/qa.test.base_content.txt
md5sum: 99aa9a775ccd8d6503f0cf80f775203c
- path: output/faqcs/qa.test.for_qual_histogram.txt
md5sum: 4f4b131be5425bdfa4b3237e44fa7d48
- path: output/faqcs/qa.test.length_count.txt
md5sum: 420298983c762754d5b0ef32c9d5dad4
- path: output/faqcs/test.base_content.txt
md5sum: 99aa9a775ccd8d6503f0cf80f775203c
- path: output/faqcs/test.fastp.log
md5sum: be79dc893f87de1f82faf749cdfb848c
- path: output/faqcs/test.for_qual_histogram.txt
md5sum: 4f4b131be5425bdfa4b3237e44fa7d48
- path: output/faqcs/test.length_count.txt
md5sum: 420298983c762754d5b0ef32c9d5dad4
- path: output/faqcs/test.stats.txt
md5sum: 9a693f8af94ab8c485519d9a523aa622
- path: output/faqcs/test_1.trimmed.fastq.gz

View file

@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { HAMRONIZATION_DEEPARG } from '../../../../modules/hamronization/deeparg/main.nf'
workflow test_hamronization_deeparg {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true),
]
HAMRONIZATION_DEEPARG ( input, 'tsv', '1.0.2', '2' )
}

View file

@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@ -0,0 +1,8 @@
- name: hamronization deeparg test_hamronization_deeparg
command: nextflow run tests/modules/hamronization/deeparg -entry test_hamronization_deeparg -c tests/config/nextflow.config
tags:
- hamronization
- hamronization/deeparg
files:
- path: output/hamronization/test.tsv
md5sum: 3c315605aca0c5964796bb5fd4cdd522

View file

@ -0,0 +1,36 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { HAMRONIZATION_DEEPARG } from '../../../../modules/hamronization/deeparg/main.nf'
include { HAMRONIZATION_DEEPARG as HAMRONIZATION_DEEPARG_SECOND } from '../../../../modules/hamronization/deeparg/main.nf'
include { HAMRONIZATION_SUMMARIZE } from '../../../../modules/hamronization/summarize/main.nf'
workflow test_hamronization_summarize {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true),
]
input2 = [
[ id:'test2', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true),
]
HAMRONIZATION_DEEPARG ( input, 'tsv', '1.0.2', '2' )
HAMRONIZATION_DEEPARG_SECOND ( input2, 'tsv', '1.0.2', '2' )
ch_deeparg_run_one = HAMRONIZATION_DEEPARG.out.tsv
ch_deeparg_run_two = HAMRONIZATION_DEEPARG_SECOND.out.tsv
ch_deeparg_run_one
.mix( ch_deeparg_run_two )
.map{
[ it[1] ]
}
.collect()
.set { ch_input_for_summarize }
HAMRONIZATION_SUMMARIZE ( ch_input_for_summarize , 'json' )
}

View file

@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@ -0,0 +1,14 @@
- name: hamronization summarize test_hamronization_summarize
command: nextflow run tests/modules/hamronization/summarize -entry test_hamronization_summarize -c tests/config/nextflow.config
tags:
- hamronization
- hamronization/summarize
files:
- path: output/hamronization/hamronization_combined_report.json
md5sum: 1623b6cc3b213208a425e023edd94691
- path: output/hamronization/test.tsv
md5sum: 3c315605aca0c5964796bb5fd4cdd522
- path: output/hamronization/test2.tsv
md5sum: 453f38502e35261a50a0849dca34f05b
- path: output/hamronization/versions.yml
md5sum: 99b5046fac643e16ca3362d1baf3284b

View file

@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { HPSUISSERO } from '../../../modules/hpsuissero/main.nf'
workflow test_hpsuissero {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true)
]
HPSUISSERO ( input )
}

View file

@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@ -0,0 +1,9 @@
- name: hpsuissero test_hpsuissero
command: nextflow run tests/modules/hpsuissero -entry test_hpsuissero -c tests/config/nextflow.config
tags:
- hpsuissero
files:
- path: output/hpsuissero/test_serotyping_res.tsv
md5sum: 559dd2ca386eeb58f3975e3204ce9d43
- path: output/hpsuissero/versions.yml
md5sum: f65438e63a74ac6ee365bfdbbd3f996a

View file

@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { MAFFT } from '../../../modules/mafft/main.nf'
workflow test_mafft {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['scaffolds_fasta'], checkIfExists: true)
]
MAFFT ( input )
}

View file

@ -0,0 +1,6 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
ext.args = "--auto"
}

View file

@ -0,0 +1,9 @@
- name: mafft test_mafft
command: nextflow run tests/modules/mafft -entry test_mafft -c tests/config/nextflow.config
tags:
- mafft
files:
- path: output/mafft/test.fas
md5sum: 23426611f4a0df532b6708f072bd445b
- path: output/mafft/versions.yml
md5sum: b1b5ab3728ae17401808335f1c8f8215

View file

@ -12,10 +12,14 @@ workflow test_malt_run {
gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true)
seq_type = "DNA"
map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true)
input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
]
mode = "BlastN"
UNZIP ( map_db )
MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive )
MALT_RUN ( input, mode, MALT_BUILD.out.index )
}

View file

@ -5,4 +5,4 @@
- malt/run
files:
- path: output/malt/test_1.rma6
- path: output/malt/malt-run.log
- path: output/malt/test-malt-run.log

View file

@ -4,7 +4,7 @@
- picard/cleansam
- picard
files:
- path: output/picard/test.sam
md5sum: e314171a6060eb79947c13ad126ddf00
- path: output/picard/test.bam
md5sum: a48f8e77a1480445efc57570c3a38a68
- path: output/picard/versions.yml
md5sum: e6457d7c6de51bf6f4b577eda65e57ac

View file

@ -0,0 +1,18 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { PICARD_SORTVCF } from '../../../../modules/picard/sortvcf/main.nf'
workflow test_picard_sortvcf {
input = [ [ id:'test' ], // meta map
file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true)
]
fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ]
dict = [ file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) ]
PICARD_SORTVCF ( input, fasta, dict )
}

View file

@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

View file

@ -0,0 +1,7 @@
- name: picard sortvcf
command: nextflow run ./tests/modules/picard/sortvcf -entry test_picard_sortvcf -c ./tests/config/nextflow.config -c ./tests/modules/picard/sortvcf/nextflow.config
tags:
- picard
- picard/sortvcf
files:
- path: output/picard/test_sorted.vcf.gz

View file

@ -0,0 +1,24 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf'
include { PLINK2_SCORE } from '../../../../modules/plink2/score/main.nf'
workflow test_plink2_score {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true)
]
PLINK2_VCF ( input )
scorefile = file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_score'], checkIfExists: true)
PLINK2_VCF.out.pgen
.concat(PLINK2_VCF.out.psam, PLINK2_VCF.out.pvar)
.groupTuple()
.map { it.flatten() }
.set { ch_target_genome }
PLINK2_SCORE ( ch_target_genome, scorefile )
}

View file

@ -0,0 +1,15 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
// relabel input variants to a common scheme chr:pos:alt:ref
withName: PLINK2_VCF {
ext.args = '--set-missing-var-ids @:#:\\$1:\\$2'
}
// scoring really needs an adjustment for the small test dataset (normally n > 50)
withName: PLINK2_SCORE {
ext.args = 'no-mean-imputation'
}
}

View file

@ -0,0 +1,16 @@
- name: plink2 score test_plink2_score
command: nextflow run tests/modules/plink2/score -entry test_plink2_score -c tests/config/nextflow.config
tags:
- plink2
- plink2/score
files:
- path: output/plink2/test.pgen
md5sum: fac12ca9041d6950f6b7d60ac2120721
- path: output/plink2/test.psam
md5sum: e6c714488754cb8448c3dfda08c4c0ea
- path: output/plink2/test.pvar.zst
md5sum: 98d59e9779a8b62d5032cd98b642a63b
- path: output/plink2/test.sscore
md5sum: 97bde840f69febd65f2c00e9243126e9
- path: output/plink2/versions.yml
md5sum: 71499ab14e1583c88ced3a7a4f05bfa7

View file

@ -1,12 +1,14 @@
- name: plink2 vcf test_plink2_vcf
command: nextflow run ./tests/modules/plink2/vcf -entry test_plink2_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink2/vcf/nextflow.config
command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config
tags:
- plink2/vcf
- plink2
- plink2/vcf
files:
- path: output/plink2/test.pgen
md5sum: d66d3cd4a6c9cca1a4073d7f4b277041
- path: output/plink2/test.psam
md5sum: dc3b77d7753a7bed41734323e3549b10
- path: output/plink2/test.pvar
md5sum: d61e53f847a6335138b584216b4e45d0
- path: output/plink2/test.pvar.zst
md5sum: b53cccb83e024a39789af5eab8de1c28
- path: output/plink2/versions.yml
md5sum: 82ada74bc81473b7cba377f696acf54c

View file

@ -5,7 +5,7 @@
- samtools/ampliconclip
files:
- path: output/samtools/test.bam
md5sum: 678f9ab04fbe3206f0f96e170fd833e9
md5sum: 5d0e8bc9e6059ef3a63ee6328a3935c7
- name: samtools ampliconclip no stats with rejects
command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config
@ -14,9 +14,9 @@
- samtools/ampliconclip
files:
- path: output/samtools/test.bam
md5sum: bbf65ea626539d96c8271e17d1fc988b
md5sum: 2c998295d624c59620b7ffdb0cc080e2
- path: output/samtools/test.cliprejects.bam
md5sum: a0bee15aead020d16d0c81bd9667df46
md5sum: f3ebba8d91ad29cc4d2d00943e6f6bab
- name: samtools ampliconclip with stats with rejects
command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config
@ -25,8 +25,8 @@
- samtools/ampliconclip
files:
- path: output/samtools/test.bam
md5sum: f5a3611ecad34ba2dde77096e1c7dd93
md5sum: 87882973b425ab27aad6ef18faf11f25
- path: output/samtools/test.cliprejects.bam
md5sum: 90ee7ce908b4bdb89ab41e4410de9012
md5sum: eb5e186e1a69864dc2e99a290f02ff78
- path: output/samtools/test.clipstats.txt
md5sum: fc23355e1743d47f2541f2cb1a7a0cda

View file

@ -14,9 +14,9 @@
- samtools
files:
- path: output/samtools/test_1.fq.gz
md5sum: 4522edbe158ec4804765794569f67493
md5sum: 1c84aadcdca10e97be2b5b6ce773f5ed
- path: output/samtools/test_2.fq.gz
md5sum: 7e00ef40d5cfe272b67461381019dcc1
md5sum: e679ec035d3208785e704458d6b68c8c
- path: output/samtools/test_other.fq.gz
md5sum: 709872fc2910431b1e8b7074bfe38c67
- path: output/samtools/test_singleton.fq.gz

View file

@ -7,4 +7,4 @@
- path: output/samtools/genome.fasta.fai
md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5
- path: output/samtools/versions.yml
md5sum: d56671a7c8f8058944d3d536c3058f7f
md5sum: 6a16b2148a0ab43e6d0506056e6a0409

View file

@ -5,6 +5,6 @@
- samtools/fastq
files:
- path: output/samtools/test_2.fastq.gz
md5sum: 3b1c92f33a44a78d82f8360ab4fdfd61
md5sum: 51e7a469b554de694799bec982fd722e
- path: output/samtools/test_1.fastq.gz
md5sum: 5a3f9c69a032c4ffd9071ea31a14e6f9
md5sum: 6c2d5b467eb94e058300271a542e34e6

View file

@ -5,4 +5,4 @@
- samtools/fixmate
files:
- path: output/samtools/test.bam
md5sum: a4092657a4b17170c7702a76cbf192a1
md5sum: c7f574bb0c469e0ccfecb6b7210e03c5

View file

@ -23,4 +23,4 @@
- samtools/index
files:
- path: output/samtools/test.paired_end.sorted.bam.csi
md5sum: 3dd9e3ed959fca075b88bb8dc3cf7dbd
md5sum: 8d63373007553e74d823fc2b9cbcf84d

Some files were not shown because too many files have changed in this diff.