Merge remote-tracking branch 'upstream/master' into gatk_spark

commit 1e396d767f
Author: SusiJo
Date:   2022-05-31 14:36:14 +02:00

19 changed files with 393 additions and 83 deletions


@@ -42,7 +42,6 @@ output:
       type: file
       description: File containing software versions
       pattern: "versions.yml"
-  ## TODO nf-core: Delete / customise this example output
   - out:
       type: file
       description: The data in the asked format (bed, fasta, fastq, json, pileup, sam, yaml)


@@ -8,8 +8,8 @@ process MASH_SCREEN {
         'quay.io/biocontainers/mash:2.3--he348c14_1' }"

     input:
-    tuple val(meta), path(query_sketch)
-    path fastx_db
+    tuple val(meta), path(query)
+    path sequences_sketch

     output:
     tuple val(meta), path("*.screen"), emit: screen
@@ -26,8 +26,8 @@ process MASH_SCREEN {
         screen \\
         $args \\
        -p $task.cpus \\
-        $query_sketch \\
-        $fastx_db \\
+        $sequences_sketch \\
+        $query \\
         > ${prefix}.screen

     cat <<-END_VERSIONS > versions.yml
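
With this change the inputs swap roles: the FASTQ reads arrive as the meta-tagged `query` tuple and the pre-built mash sketch as the plain `sequences_sketch` path. A minimal sketch of the new call order, assuming hypothetical local file names and include path (see also the updated test workflow further down):

    include { MASH_SCREEN } from './modules/mash/screen/main.nf'   // path is an assumption

    workflow {
        // reads carry the meta map; the pre-built *.msh sketch is passed second
        reads_ch = Channel.of( [ [ id:'sample1', single_end:false ], file('sample1_1.fastq.gz') ] )
        sketch   = file('reference.msh')

        MASH_SCREEN ( reads_ch, sketch )
        MASH_SCREEN.out.screen.view()
    }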


@@ -20,13 +20,14 @@ input:
       description: |
         Groovy Map containing sample information
         e.g. [ id:'test', single_end:false ]
-  - query_sketch:
+  - query:
       type: file
-      description: MinHash sketch of query sequences
-      pattern: "*.msh"
-  - fastx_db:
+      description: Query sequences
+      pattern: "*.fastq.gz"
+  - sequence_sketch:
       type: file
       description: Sequence files to match against
+      pattern: "*.msh"

 output:
   - meta:


@@ -14,13 +14,17 @@ process MOSDEPTH {
     output:
     tuple val(meta), path('*.global.dist.txt') , emit: global_txt
-    tuple val(meta), path('*.region.dist.txt') , emit: regions_txt , optional:true
     tuple val(meta), path('*.summary.txt') , emit: summary_txt
-    tuple val(meta), path('*.per-base.d4') , emit: per_base_d4 , optional:true
-    tuple val(meta), path('*.per-base.bed.gz') , emit: per_base_bed, optional:true
-    tuple val(meta), path('*.per-base.bed.gz.csi'), emit: per_base_csi, optional:true
-    tuple val(meta), path('*.regions.bed.gz') , emit: regions_bed , optional:true
-    tuple val(meta), path('*.regions.bed.gz.csi') , emit: regions_csi , optional:true
+    tuple val(meta), path('*.region.dist.txt') , optional:true, emit: regions_txt
+    tuple val(meta), path('*.per-base.d4') , optional:true, emit: per_base_d4
+    tuple val(meta), path('*.per-base.bed.gz') , optional:true, emit: per_base_bed
+    tuple val(meta), path('*.per-base.bed.gz.csi') , optional:true, emit: per_base_csi
+    tuple val(meta), path('*.regions.bed.gz') , optional:true, emit: regions_bed
+    tuple val(meta), path('*.regions.bed.gz.csi') , optional:true, emit: regions_csi
+    tuple val(meta), path('*.quantized.bed.gz') , optional:true, emit: quantized_bed
+    tuple val(meta), path('*.quantized.bed.gz.csi') , optional:true, emit: quantized_csi
+    tuple val(meta), path('*.thresholds.bed.gz') , optional:true, emit: thresholds_bed
+    tuple val(meta), path('*.thresholds.bed.gz.csi'), optional:true, emit: thresholds_csi
     path "versions.yml" , emit: versions

     when:
@@ -34,10 +38,13 @@ process MOSDEPTH {
     if (bed && args.contains("--by")) {
         exit 1, "'--by' can only be specified once when running mosdepth! Either remove input BED file definition or remove '--by' from 'ext.args' definition"
     }
+    if (!bed && args.contains("--thresholds")) {
+        exit 1, "'--thresholds' can only be specified in conjunction with '--by'"
+    }
     """
     mosdepth \\
-        --threads ${task.cpus} \\
+        --threads $task.cpus \\
         $interval \\
         $reference \\
         $args \\
@@ -61,6 +68,10 @@ process MOSDEPTH {
     touch ${prefix}.per-base.bed.gz.csi
     touch ${prefix}.regions.bed.gz
     touch ${prefix}.regions.bed.gz.csi
+    touch ${prefix}.quantized.bed.gz
+    touch ${prefix}.quantized.bed.gz.csi
+    touch ${prefix}.thresholds.bed.gz
+    touch ${prefix}.thresholds.bed.gz.csi

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":


@@ -72,6 +72,22 @@ output:
       type: file
       description: Index file for BED file with per-region coverage
       pattern: "*.{regions.bed.gz.csi}"
+  - quantized_bed:
+      type: file
+      description: BED file with binned coverage
+      pattern: "*.{quantized.bed.gz}"
+  - quantized_csi:
+      type: file
+      description: Index file for BED file with binned coverage
+      pattern: "*.{quantized.bed.gz.csi}"
+  - thresholds_bed:
+      type: file
+      description: BED file with the number of bases in each region that are covered at or above each threshold
+      pattern: "*.{thresholds.bed.gz}"
+  - thresholds_csi:
+      type: file
+      description: Index file for BED file with threshold coverage
+      pattern: "*.{thresholds.bed.gz.csi}"
   - versions:
       type: file
       description: File containing software versions
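
The four new optional channels stay empty unless mosdepth receives the matching flags through `ext.args`, and the check added in main.nf ties `--thresholds` to a BED/`--by` input. A minimal, hypothetical sketch of enabling and consuming them, with made-up file names (the test config later in this commit shows the same `ext.args` pattern):

    include { MOSDEPTH } from './modules/mosdepth/main.nf'   // path is an assumption

    // in a modules config: process { withName: MOSDEPTH { ext.args = '--quantize 0:1:4:100:200 --thresholds 1,10,20,30' } }
    workflow {
        bam_ch = Channel.of( [ [ id:'sample1' ], file('sample1.bam'), file('sample1.bam.bai') ] )
        bed    = file('targets.bed')   // required so --thresholds is valid (enables --by)

        MOSDEPTH ( bam_ch, bed, [] )
        MOSDEPTH.out.quantized_bed.view  { meta, f -> "quantized:  ${f}" }
        MOSDEPTH.out.thresholds_bed.view { meta, f -> "thresholds: ${f}" }
    }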


@@ -2,11 +2,10 @@ process STAR_ALIGN {
     tag "$meta.id"
     label 'process_high'

-    // Note: 2.7X indices incompatible with AWS iGenomes.
-    conda (params.enable_conda ? 'bioconda::star=2.7.9a' : null)
+    conda (params.enable_conda ? "bioconda::star=2.7.10a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' :
-        'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' :
+        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' }"

     input:
     tuple val(meta), path(reads)
@@ -67,6 +66,8 @@ process STAR_ALIGN {
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
         star: \$(STAR --version | sed -e "s/STAR_//g")
+        samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+        gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//')
     END_VERSIONS
     """
 }


@@ -2,11 +2,10 @@ process STAR_GENOMEGENERATE {
     tag "$fasta"
     label 'process_high'

-    // Note: 2.7X indices incompatible with AWS iGenomes.
-    conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
+    conda (params.enable_conda ? "bioconda::star=2.7.10a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' :
-        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' :
+        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' }"

     input:
     path fasta
@@ -14,7 +13,7 @@ process STAR_GENOMEGENERATE {
     output:
     path "star"         , emit: index
-    path "versions.yml" , emit: versions
+    path "versions.yml", emit: versions

     when:
     task.ext.when == null || task.ext.when


@@ -0,0 +1,67 @@
+process VSEARCH_USEARCHGLOBAL {
+    tag "${meta.id}"
+    label 'process_low'
+
+    conda (params.enable_conda ? "bioconda::vsearch=2.21.1" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/vsearch:2.21.1--h95f258a_0':
+        'quay.io/biocontainers/vsearch:2.21.1--h95f258a_0' }"
+
+    input:
+    tuple val(meta), path(queryfasta)
+    path db
+    val idcutoff
+    val outoption
+    val user_columns
+
+    output:
+    tuple val(meta), path('*.aln')    , optional: true, emit: aln
+    tuple val(meta), path('*.biom')   , optional: true, emit: biom
+    tuple val(meta), path('*.lca')    , optional: true, emit: lca
+    tuple val(meta), path('*.mothur') , optional: true, emit: mothur
+    tuple val(meta), path('*.otu')    , optional: true, emit: otu
+    tuple val(meta), path('*.sam')    , optional: true, emit: sam
+    tuple val(meta), path('*.tsv')    , optional: true, emit: tsv
+    tuple val(meta), path('*.txt')    , optional: true, emit: txt
+    tuple val(meta), path('*.uc')     , optional: true, emit: uc
+    path "versions.yml"               , emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def columns = user_columns ? "--userfields ${user_columns}" : ''
+    switch ( outoption ) {
+        case "alnout": outfmt = "--alnout"; out_ext = 'aln'; break
+        case "biomout": outfmt = "--biomout"; out_ext = 'biom'; break
+        case "blast6out": outfmt = "--blast6out"; out_ext = 'txt'; break
+        case "mothur_shared_out": outfmt = "--mothur_shared_out"; out_ext = 'mothur'; break
+        case "otutabout": outfmt = "--otutabout"; out_ext = 'otu'; break
+        case "samout": outfmt = "--samout"; out_ext = 'sam'; break
+        case "uc": outfmt = "--uc"; out_ext = 'uc'; break
+        case "userout": outfmt = "--userout"; out_ext = 'tsv'; break
+        case "lcaout": outfmt = "--lcaout"; out_ext = 'lca'; break
+        default:
+            outfmt = "--alnout";
+            out_ext = 'aln';
+            log.warn("Unknown output file format provided (${outoption}): selecting pairwise alignments (alnout)");
+            break
+    }
+    """
+    vsearch \\
+        --usearch_global $queryfasta \\
+        --db $db \\
+        --id $idcutoff \\
+        --threads $task.cpus \\
+        $args \\
+        ${columns} \\
+        ${outfmt} ${prefix}.${out_ext}
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        vsearch: \$(vsearch --version 2>&1 | head -n 1 | sed 's/vsearch //g' | sed 's/,.*//g' | sed 's/^v//' | sed 's/_.*//')
+    END_VERSIONS
+    """
+}
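
Because the result file lands in a different channel depending on `outoption`, callers pick the matching emit. A minimal, hypothetical invocation using the `blast6out` option, which the switch above routes to the `txt` channel (file names and id cutoff are illustrative):

    include { VSEARCH_USEARCHGLOBAL } from './modules/vsearch/usearchglobal/main.nf'   // path is an assumption

    workflow {
        query_ch = Channel.of( [ [ id:'sample1' ], file('query.fasta') ] )
        db       = file('reference.fasta')

        // empty string for user_columns = no --userfields
        VSEARCH_USEARCHGLOBAL ( query_ch, db, 0.9, 'blast6out', '' )
        VSEARCH_USEARCHGLOBAL.out.txt.view()
    }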


@@ -0,0 +1,83 @@
+name: "vsearch_usearchglobal"
+description: Compare target sequences to fasta-formatted query sequences using global pairwise alignment.
+keywords:
+  - vsearch
+  - usearch
+  - alignment
+  - fasta
+tools:
+  - "vsearch":
+      description: "VSEARCH is a versatile open-source tool for microbiome analysis, including chimera detection, clustering, dereplication and rereplication, extraction, FASTA/FASTQ/SFF file processing, masking, orienting, pair-wise alignment, restriction site cutting, searching, shuffling, sorting, subsampling, and taxonomic classification of amplicon sequences for metagenomics, genomics, and population genetics. (USEARCH alternative)"
+      homepage: "https://github.com/torognes/vsearch"
+      documentation: "None"
+      tool_dev_url: "https://github.com/torognes/vsearch"
+      doi: "doi: 10.7717/peerj.2584"
+      licence: "['GPL v3-or-later OR BSD-2-clause']"
+
+input:
+  - meta:
+      type: map
+      description: Groovy Map containing sample information e.g. [ id:'test' ]
+  - queryfasta:
+      type: file
+      description: Query sequences in FASTA format
+      pattern: "*.{fasta,fa,fna,faa}"
+  - db:
+      type: file
+      description: Reference database file in FASTA or UDB format
+      pattern: "*"
+  - idcutoff:
+      type: real
+      description: Reject the sequence match if the pairwise identity is lower than the given id cutoff value (value ranging from 0.0 to 1.0 included)
+  - outoption:
+      type: string
+      description: Specify the type of output file to be generated by selecting one of the vsearch output file options
+      pattern: "alnout|biomout|blast6out|mothur_shared_out|otutabout|samout|uc|userout|lcaout"
+  - user_columns:
+      type: string
+      description: If using the `userout` option, specify which columns to include in output, with fields separated with `+` (e.g. query+target+id). See USEARCH manual for valid options. For other output options, use an empty string.
+
+output:
+  - aln:
+      type: file
+      description: Results in pairwise alignment format
+      pattern: "*.{aln}"
+  - biom:
+      type: file
+      description: Results in an OTU table in the biom version 1.0 file format
+      pattern: "*.{biom}"
+  - lca:
+      type: file
+      description: Last common ancestor (LCA) information about the hits of each query in tab-separated format
+      pattern: "*.{lca}"
+  - mothur:
+      type: file
+      description: Results in an OTU table in the mothur shared tab-separated plain text file format
+      pattern: "*.{mothur}"
+  - otu:
+      type: file
+      description: Results in an OTU table in the classic tab-separated plain text format
+      pattern: "*.{otu}"
+  - sam:
+      type: file
+      description: Results written in sam format
+      pattern: "*.{sam}"
+  - tsv:
+      type: file
+      description: Results in tab-separated output, columns defined by user
+      pattern: "*.{tsv}"
+  - txt:
+      type: file
+      description: Tab delimited results in blast-like tabular format
+      pattern: "*.{txt}"
+  - uc:
+      type: file
+      description: Tab delimited results in a uclust-like format with 10 columns
+      pattern: "*.{uc}"
+  - versions:
+      type: file
+      description: File containing software versions
+      pattern: "versions.yml"
+
+authors:
+  - "@jtangrot"


@@ -2052,6 +2052,10 @@ vcftools:
   - modules/vcftools/**
   - tests/modules/vcftools/**

+vsearch/usearchglobal:
+  - modules/vsearch/usearchglobal/**
+  - tests/modules/vsearch/usearchglobal/**
+
 yara/index:
   - modules/yara/index/**
   - tests/modules/yara/index/**


@@ -14,8 +14,8 @@ workflow test_mash_screen {
             file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)
         ]
     ]
-    fastx_db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    sars_db = [
+        [ id: 'sars_db' ],
+        file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    ]

-    MASH_SKETCH ( input )
-    MASH_SCREEN ( MASH_SKETCH.out.mash, fastx_db )
+    MASH_SKETCH ( sars_db )
+    MASH_SCREEN ( input, MASH_SKETCH.out.mash.map { meta, sketch -> sketch } )
 }


@@ -4,9 +4,9 @@
     - mash
     - mash/screen
   files:
-    - path: output/mash/test.mash_stats
-      md5sum: 2a6f297d8e69a5e4160243bc6c89129c
-    - path: output/mash/test.msh
-      md5sum: d747145a43dad5f82342036f8f5d9133
+    - path: output/mash/sars_db.mash_stats
+      md5sum: 1dafbd23e36e18bf4c87a007d0fc98f7
+    - path: output/mash/sars_db.msh
+      md5sum: 24289e4a13526e88eeb2abfca4a0f0a8
     - path: output/mash/test.screen
-      md5sum: d3c871dccd5cd57ab54781fa5c5d7278
+      md5sum: ac8701e1aab651b2f36c6380b1351b11


@@ -5,12 +5,14 @@ nextflow.enable.dsl = 2
 include { MOSDEPTH } from '../../../modules/mosdepth/main.nf'
 include { MOSDEPTH as MOSDEPTH_FAIL } from '../../../modules/mosdepth/main.nf'
 include { MOSDEPTH as MOSDEPTH_WINDOW } from '../../../modules/mosdepth/main.nf'
+include { MOSDEPTH as MOSDEPTH_THRESHOLD } from '../../../modules/mosdepth/main.nf'
+include { MOSDEPTH as MOSDEPTH_QUANTIZED } from '../../../modules/mosdepth/main.nf'

 workflow test_mosdepth {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]

     MOSDEPTH ( input, [], [] )
@@ -19,10 +21,10 @@ workflow test_mosdepth {
 workflow test_mosdepth_bed {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH ( input, bed, [] )
 }
@@ -30,10 +32,10 @@ workflow test_mosdepth_bed {
 workflow test_mosdepth_cram {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)
     ]
-    fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ]
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

     MOSDEPTH ( input, [], fasta )
 }
@@ -41,11 +43,11 @@ workflow test_mosdepth_cram {
 workflow test_mosdepth_cram_bed {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)
     ]
-    bed   = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
-    fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ]
+    bed   = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

     MOSDEPTH ( input, bed, fasta )
 }
@@ -53,21 +55,42 @@ workflow test_mosdepth_cram_bed {
 workflow test_mosdepth_window {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH_WINDOW ( input, [], [] )
 }

+workflow test_mosdepth_quantized {
+    input = [
+        [ id:'test', single_end:true ],
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+    ]
+
+    MOSDEPTH_QUANTIZED ( input, [], [] )
+}
+
+workflow test_mosdepth_thresholds {
+    input = [
+        [ id:'test', single_end:true ],
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+    ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)
+
+    MOSDEPTH_THRESHOLD ( input, bed, [] )
+}
+
 workflow test_mosdepth_fail {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH_FAIL ( input, bed, [] )
 }


@@ -7,4 +7,10 @@ process {
     withName: MOSDEPTH_WINDOW {
         ext.args = "--by 100"
     }
+    withName: MOSDEPTH_QUANTIZED {
+        ext.args = "--quantize 0:1:4:100:200"
+    }
+    withName: MOSDEPTH_THRESHOLD {
+        ext.args = "--thresholds 1,10,20,30"
+    }
 }


@@ -86,6 +86,48 @@
     - path: output/mosdepth/test.regions.bed.gz.csi
       md5sum: 257d67678136963d9dd904330079609d

+- name: mosdepth test_mosdepth_quantized
+  command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_quantized -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
+  tags:
+    - mosdepth
+  files:
+    - path: output/mosdepth/test.mosdepth.global.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.summary.txt
+      md5sum: 4f0d231060cbde4efdd673863bd2fb59
+    - path: output/mosdepth/test.per-base.bed.gz
+      md5sum: bc1df47d46f818fee5275975925d769a
+    - path: output/mosdepth/test.per-base.bed.gz.csi
+      md5sum: 9e649ac749ff6c6073bef5ab63e8aaa4
+    - path: output/mosdepth/test.quantized.bed.gz
+      md5sum: 3e434a8bafcf59a67841ae3d4d752838
+    - path: output/mosdepth/test.quantized.bed.gz.csi
+      md5sum: be9617f551f19a33923f1e886eaefb93
+
+- name: mosdepth test_mosdepth_thresholds
+  command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_thresholds -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
+  tags:
+    - mosdepth
+  files:
+    - path: output/mosdepth/test.mosdepth.global.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.region.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.summary.txt
+      md5sum: 96c037f769974b904beb53edc4f56d82
+    - path: output/mosdepth/test.per-base.bed.gz
+      md5sum: bc1df47d46f818fee5275975925d769a
+    - path: output/mosdepth/test.per-base.bed.gz.csi
+      md5sum: 9e649ac749ff6c6073bef5ab63e8aaa4
+    - path: output/mosdepth/test.regions.bed.gz
+      md5sum: 5d398caf7171ec4406278e2add3009ae
+    - path: output/mosdepth/test.regions.bed.gz.csi
+      md5sum: 47669cfe41f3e222e74d81e1b1be191f
+    - path: output/mosdepth/test.thresholds.bed.gz
+      md5sum: 13101e326eea3cbfa1d569b69f494f4c
+    - path: output/mosdepth/test.thresholds.bed.gz.csi
+      md5sum: 912055ee9452229439df6fae95644196
+
 - name: mosdepth test_mosdepth_fail
   command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_fail -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
   tags:


@@ -36,7 +36,7 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: b9f5e2f6a624b64c300fe25dc3ac801f
+      md5sum: 63de6af2210e138b49d7b4d570c6e67f
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -80,7 +80,7 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: 38d08f0b944a2a1b981a250d675aa0d9
+      md5sum: 7cdef439bc8092bfefb4d091bf8ee6ab
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -124,7 +124,7 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: c740d5177067c1fcc48ab7a16cd639d7
+      md5sum: 5dbc36fce7b72628c809bbc7d3d67973
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -168,9 +168,9 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: a1bd1b40950a58ea2776908076160052
+      md5sum: d85858bf55a523121dde762046a34c5c
     - path: output/star/test.Chimeric.out.junction
-      md5sum: 327629eb54032212f29e1c32cbac6975
+      md5sum: ae87d1a24180f5a35cf6b47fdfdd0539
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out


@ -0,0 +1,25 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { VSEARCH_USEARCHGLOBAL } from '../../../../modules/vsearch/usearchglobal/main.nf'
workflow test_vsearch_usearchglobal {
query = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true)
db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
idcutoff = 0.985
outoption = "xcfert" // Nonsense text to check default case.
columns = ""
VSEARCH_USEARCHGLOBAL ( [[id:'test'], query], db, idcutoff, outoption, columns )
}
workflow test_vsearch_usearchglobal_userout {
query = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true)
db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
idcutoff = 0.985
outoption = "userout"
columns = "query+target+id"
VSEARCH_USEARCHGLOBAL ( [[id:'test'], query], db, idcutoff, outoption, columns )
}


@@ -0,0 +1,4 @@
+process {
+
+    publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
+
+}


@@ -0,0 +1,26 @@
+- name: vsearch usearchglobal test_vsearch_usearchglobal
+  command: nextflow run ./tests/modules/vsearch/usearchglobal -entry test_vsearch_usearchglobal -c ./tests/config/nextflow.config -c ./tests/modules/vsearch/usearchglobal/nextflow.config
+  tags:
+    - vsearch/usearchglobal
+    - vsearch
+  files:
+    - path: output/vsearch/test.aln
+      contains:
+        - "vsearch --usearch_global transcriptome.fasta --db genome.fasta --id 0.985 --threads 2 --alnout test.aln"
+        - "Query >lcl|MT192765.1_cds_QIK50427.1_2"
+        - "%Id TLen Target"
+        - "100% 29829 MT192765.1"
+        - "Query 3822nt >lcl|MT192765.1_cds_QIK50427.1_2"
+        - "Target 29829nt >MT192765.1"
+        - "Qry 21249 + CAACAGAGTTGTTATTTCTAGTGATGTTCTTGTTAACAACTAA 21291"
+        - "Tgt 21506 + CAACAGAGTTGTTATTTCTAGTGATGTTCTTGTTAACAACTAA 21548"
+        - "21291 cols, 21290 ids (100.0%), 1 gaps (0.0%)"
+
+- name: vsearch usearchglobal test_vsearch_usearchglobal_userout
+  command: nextflow run ./tests/modules/vsearch/usearchglobal -entry test_vsearch_usearchglobal_userout -c ./tests/config/nextflow.config -c ./tests/modules/vsearch/usearchglobal/nextflow.config
+  tags:
+    - vsearch/usearchglobal
+    - vsearch
+  files:
+    - path: output/vsearch/test.tsv
+      md5sum: b6cc50f7c8d18cb82e74dab70ed4baab