Merge branch 'master' into r-ampir

commit 5a94794793
James A. Fellows Yates, 2022-06-01 09:18:54 +02:00 (committed by GitHub)
36 changed files with 530 additions and 139 deletions

@@ -42,7 +42,6 @@ output:
       type: file
       description: File containing software versions
       pattern: "versions.yml"
-  ## TODO nf-core: Delete / customise this example output
   - out:
       type: file
       description: The data in the asked format (bed, fasta, fastq, json, pileup, sam, yaml)

modules/bcftools/concat/main.nf
@@ -8,7 +8,7 @@ process BCFTOOLS_CONCAT {
         'quay.io/biocontainers/bcftools:1.14--h88f3f91_0' }"

     input:
-    tuple val(meta), path(vcfs)
+    tuple val(meta), path(vcfs), path(tbi)

     output:
     tuple val(meta), path("*.gz"), emit: vcf

modules/bcftools/concat/meta.yml
@@ -25,6 +25,11 @@ input:
       description: |
         List containing 2 or more vcf files
         e.g. [ 'file1.vcf', 'file2.vcf' ]
+  - tbi:
+      type: files
+      description: |
+        List containing 2 or more index files (optional)
+        e.g. [ 'file1.tbi', 'file2.tbi' ]
 output:
   - meta:
       type: map
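
Editorial sketch, not part of the commit: one way a caller might wire the updated BCFTOOLS_CONCAT input. File names and the include path are placeholders; the tbi element may be an empty list when no indexes are supplied, as the new test_bcftools_concat_no_tbi workflow later in this commit exercises.

include { BCFTOOLS_CONCAT } from './modules/bcftools/concat/main.nf'

workflow {
    // [ meta, vcfs, tbi ] -- tbi may be [] when indexes are not available
    input = [
        [ id:'sampleA' ],
        [ file('chr1.vcf.gz'), file('chr2.vcf.gz') ],
        []
    ]
    BCFTOOLS_CONCAT ( input )
}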

modules/filtlong/main.nf
@@ -11,7 +11,7 @@ process FILTLONG {
     tuple val(meta), path(shortreads), path(longreads)

     output:
-    tuple val(meta), path("${meta.id}_lr_filtlong.fastq.gz"), emit: reads
+    tuple val(meta), path("*.fastq.gz"), emit: reads
     path "versions.yml"                , emit: versions

     when:
@@ -20,13 +20,14 @@ process FILTLONG {
     script:
     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
-    def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}"
+    def short_reads = !shortreads ? "" : meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}"
+    if ("$longreads" == "${prefix}.fastq.gz") error "Longread FASTQ input and output names are the same, set prefix in module configuration to disambiguate!"
     """
     filtlong \\
         $short_reads \\
         $args \\
         $longreads \\
-        | gzip -n > ${prefix}_lr_filtlong.fastq.gz
+        | gzip -n > ${prefix}.fastq.gz

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":

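Editorial sketch, not part of the commit: because the output name now comes from ext.prefix rather than being hard-coded, a pipeline whose long-read input is already called "<prefix>.fastq.gz" would hit the new error above. A minimal module-config workaround, mirroring the test config updated later in this commit (the prefix value here is an assumption):

process {
    withName: FILTLONG {
        // any prefix that differs from the long-read input's basename works
        ext.prefix = { "${meta.id}_filtered" }
    }
}
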
modules/gamma/gamma/main.nf
@@ -1,6 +1,6 @@
 def VERSION = '2.1' // Version information not provided by tool on CLI

-process GAMMA {
+process GAMMA_GAMMA {
     tag "$meta.id"
     label 'process_low'
@@ -26,13 +26,24 @@ process GAMMA {
     script:
     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
     """
-    GAMMA.py \\
+    if [[ ${fasta} == *.gz ]]
+    then
+        FNAME=\$(basename ${fasta} .gz)
+        gunzip -f ${fasta}
+        GAMMA.py \\
+        $args \\
+        "\${FNAME}" \\
+        $db \\
+        $prefix
+    else
+        GAMMA.py \\
         $args \\
         $fasta \\
         $db \\
         $prefix
+    fi

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
         gamma: $VERSION

modules/gamma/gamma/meta.yml
@@ -1,4 +1,4 @@
-name: "gamma"
+name: "gamma_gamma"
 description: Gene Allele Mutation Microbial Assessment
 keywords:
   - gamma
@@ -61,3 +61,4 @@
 authors:
   - "@sateeshperi"
   - "@rastanton"
+  - "@jvhagey"

modules/gatk4/mergevcfs/main.nf
@@ -13,6 +13,7 @@ process GATK4_MERGEVCFS {
     output:
     tuple val(meta), path('*.vcf.gz'), emit: vcf
+    tuple val(meta), path("*.tbi")   , emit: tbi
     path "versions.yml"              , emit: versions

     when:

modules/gatk4/mergevcfs/meta.yml
@@ -35,6 +35,11 @@
       type: file
       description: merged vcf file
       pattern: "*.vcf.gz"
+  - tbi:
+      type: file
+      description: index files for the merged vcf files
+      pattern: "*.tbi"
   - versions:
       type: file
       description: File containing software versions

modules/mash/screen/main.nf
@@ -8,8 +8,8 @@ process MASH_SCREEN {
         'quay.io/biocontainers/mash:2.3--he348c14_1' }"

     input:
-    tuple val(meta), path(query_sketch)
-    path fastx_db
+    tuple val(meta), path(query)
+    path sequences_sketch

     output:
     tuple val(meta), path("*.screen"), emit: screen
@@ -26,8 +26,8 @@ process MASH_SCREEN {
         screen \\
         $args \\
         -p $task.cpus \\
-        $query_sketch \\
-        $fastx_db \\
+        $sequences_sketch \\
+        $query \\
         > ${prefix}.screen

     cat <<-END_VERSIONS > versions.yml
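
Editorial sketch, not part of the commit: with the swapped inputs above, the reads now travel in the meta tuple and the sketch is passed as a bare path. A minimal calling sketch with placeholder file names; the revised test further down shows the same wiring with nf-core test data.

include { MASH_SCREEN } from './modules/mash/screen/main.nf'

workflow {
    query  = [ [ id:'sampleA' ], file('sampleA_1.fastq.gz') ] // tuple val(meta), path(query)
    sketch = file('reference.msh')                            // path sequences_sketch
    MASH_SCREEN ( query, sketch )
}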

modules/mash/screen/meta.yml
@@ -20,13 +20,14 @@ input:
       description: |
         Groovy Map containing sample information
         e.g. [ id:'test', single_end:false ]
-  - query_sketch:
+  - query:
       type: file
-      description: MinHash sketch of query sequences
-      pattern: "*.msh"
-  - fastx_db:
+      description: Query sequences
+      pattern: "*.fastq.gz"
+  - sequence_sketch:
       type: file
       description: Sequence files to match against
+      pattern: "*.msh"
 output:
   - meta:

modules/mosdepth/main.nf
@@ -13,15 +13,19 @@ process MOSDEPTH {
     path  fasta

     output:
     tuple val(meta), path('*.global.dist.txt')      , emit: global_txt
-    tuple val(meta), path('*.region.dist.txt')      , emit: regions_txt , optional:true
-    tuple val(meta), path('*.summary.txt')          , emit: summary_txt
-    tuple val(meta), path('*.per-base.d4')          , emit: per_base_d4 , optional:true
-    tuple val(meta), path('*.per-base.bed.gz')      , emit: per_base_bed, optional:true
-    tuple val(meta), path('*.per-base.bed.gz.csi')  , emit: per_base_csi, optional:true
-    tuple val(meta), path('*.regions.bed.gz')       , emit: regions_bed , optional:true
-    tuple val(meta), path('*.regions.bed.gz.csi')   , emit: regions_csi , optional:true
-    path  "versions.yml"                            , emit: versions
+    tuple val(meta), path('*.summary.txt')          , emit: summary_txt
+    tuple val(meta), path('*.region.dist.txt')      , optional:true, emit: regions_txt
+    tuple val(meta), path('*.per-base.d4')          , optional:true, emit: per_base_d4
+    tuple val(meta), path('*.per-base.bed.gz')      , optional:true, emit: per_base_bed
+    tuple val(meta), path('*.per-base.bed.gz.csi')  , optional:true, emit: per_base_csi
+    tuple val(meta), path('*.regions.bed.gz')       , optional:true, emit: regions_bed
+    tuple val(meta), path('*.regions.bed.gz.csi')   , optional:true, emit: regions_csi
+    tuple val(meta), path('*.quantized.bed.gz')     , optional:true, emit: quantized_bed
+    tuple val(meta), path('*.quantized.bed.gz.csi') , optional:true, emit: quantized_csi
+    tuple val(meta), path('*.thresholds.bed.gz')    , optional:true, emit: thresholds_bed
+    tuple val(meta), path('*.thresholds.bed.gz.csi'), optional:true, emit: thresholds_csi
+    path  "versions.yml"                            , emit: versions

     when:
     task.ext.when == null || task.ext.when
@@ -34,10 +38,13 @@ process MOSDEPTH {
     if (bed && args.contains("--by")) {
         exit 1, "'--by' can only be specified once when running mosdepth! Either remove input BED file definition or remove '--by' from 'ext.args' definition"
     }
+    if (!bed && args.contains("--thresholds")) {
+        exit 1, "'--thresholds' can only be specified in conjunction with '--by'"
+    }

     """
     mosdepth \\
-        --threads ${task.cpus} \\
+        --threads $task.cpus \\
         $interval \\
         $reference \\
         $args \\
@@ -61,6 +68,10 @@
     touch ${prefix}.per-base.bed.gz.csi
     touch ${prefix}.regions.bed.gz
     touch ${prefix}.regions.bed.gz.csi
+    touch ${prefix}.quantized.bed.gz
+    touch ${prefix}.quantized.bed.gz.csi
+    touch ${prefix}.thresholds.bed.gz
+    touch ${prefix}.thresholds.bed.gz.csi

     cat <<-END_VERSIONS > versions.yml
     "${task.process}":

modules/mosdepth/meta.yml
@@ -72,6 +72,22 @@
       type: file
       description: Index file for BED file with per-region coverage
       pattern: "*.{regions.bed.gz.csi}"
+  - quantized_bed:
+      type: file
+      description: BED file with binned coverage
+      pattern: "*.{quantized.bed.gz}"
+  - quantized_csi:
+      type: file
+      description: Index file for BED file with binned coverage
+      pattern: "*.{quantized.bed.gz.csi}"
+  - thresholds_bed:
+      type: file
+      description: BED file with the number of bases in each region that are covered at or above each threshold
+      pattern: "*.{thresholds.bed.gz}"
+  - thresholds_csi:
+      type: file
+      description: Index file for BED file with threshold coverage
+      pattern: "*.{thresholds.bed.gz.csi}"
   - versions:
       type: file
       description: File containing software versions

modules/star/align/main.nf
@@ -2,16 +2,15 @@ process STAR_ALIGN {
     tag "$meta.id"
     label 'process_high'

-    // Note: 2.7X indices incompatible with AWS iGenomes.
-    conda (params.enable_conda ? 'bioconda::star=2.7.9a' : null)
+    conda (params.enable_conda ? "bioconda::star=2.7.10a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/star:2.7.9a--h9ee0642_0' :
-        'quay.io/biocontainers/star:2.7.9a--h9ee0642_0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' :
+        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' }"

     input:
     tuple val(meta), path(reads)
     path  index
     path  gtf
     val   star_ignore_sjdbgtf
     val   seq_platform
     val   seq_center
@@ -67,6 +66,8 @@
     cat <<-END_VERSIONS > versions.yml
     "${task.process}":
         star: \$(STAR --version | sed -e "s/STAR_//g")
+        samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
+        gawk: \$(echo \$(gawk --version 2>&1) | sed 's/^.*GNU Awk //; s/, .*\$//')
     END_VERSIONS
     """
 }

modules/star/genomegenerate/main.nf
@@ -2,19 +2,18 @@ process STAR_GENOMEGENERATE {
     tag "$fasta"
     label 'process_high'

-    // Note: 2.7X indices incompatible with AWS iGenomes.
-    conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
+    conda (params.enable_conda ? "bioconda::star=2.7.10a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' :
-        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' }"
+        'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' :
+        'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:afaaa4c6f5b308b4b6aa2dd8e99e1466b2a6b0cd-0' }"

     input:
     path fasta
     path gtf

     output:
-    path "star"         , emit: index
-    path "versions.yml" , emit: versions
+    path "star"        , emit: index
+    path "versions.yml", emit: versions

     when:
     task.ext.when == null || task.ext.when
@@ -22,7 +21,7 @@ process STAR_GENOMEGENERATE {
     script:
     def args = task.ext.args ?: ''
     def args_list = args.tokenize()
     def memory = task.memory ? "--limitGenomeGenerateRAM ${task.memory.toBytes() - 100000000}" : ''
     if (args_list.contains('--genomeSAindexNbases')) {
         """
         mkdir star

modules/vsearch/usearchglobal/main.nf (new file)
@@ -0,0 +1,67 @@
process VSEARCH_USEARCHGLOBAL {
tag "${meta.id}"
label 'process_low'
conda (params.enable_conda ? "bioconda::vsearch=2.21.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/vsearch:2.21.1--h95f258a_0':
'quay.io/biocontainers/vsearch:2.21.1--h95f258a_0' }"
input:
tuple val(meta), path(queryfasta)
path db
val idcutoff
val outoption
val user_columns
output:
tuple val(meta), path('*.aln') , optional: true, emit: aln
tuple val(meta), path('*.biom') , optional: true, emit: biom
tuple val(meta), path('*.lca') , optional: true, emit: lca
tuple val(meta), path('*.mothur') , optional: true, emit: mothur
tuple val(meta), path('*.otu') , optional: true, emit: otu
tuple val(meta), path('*.sam') , optional: true, emit: sam
tuple val(meta), path('*.tsv') , optional: true, emit: tsv
tuple val(meta), path('*.txt') , optional: true, emit: txt
tuple val(meta), path('*.uc') , optional: true, emit: uc
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
def columns = user_columns ? "--userfields ${user_columns}" : ''
switch ( outoption ) {
case "alnout": outfmt = "--alnout"; out_ext = 'aln'; break
case "biomout": outfmt = "--biomout"; out_ext = 'biom'; break
case "blast6out": outfmt = "--blast6out"; out_ext = 'txt'; break
case "mothur_shared_out": outfmt = "--mothur_shared_out"; out_ext = 'mothur'; break
case "otutabout": outfmt = "--otutabout"; out_ext = 'otu'; break
case "samout": outfmt = "--samout"; out_ext = 'sam'; break
case "uc": outfmt = "--uc"; out_ext = 'uc'; break
case "userout": outfmt = "--userout"; out_ext = 'tsv'; break
case "lcaout": outfmt = "--lcaout"; out_ext = 'lca'; break
default:
outfmt = "--alnout";
out_ext = 'aln';
log.warn("Unknown output file format provided (${outoption}): selecting pairwise alignments (alnout)");
break
}
"""
vsearch \\
--usearch_global $queryfasta \\
--db $db \\
--id $idcutoff \\
--threads $task.cpus \\
$args \\
${columns} \\
${outfmt} ${prefix}.${out_ext}
cat <<-END_VERSIONS > versions.yml
"${task.process}":
vsearch: \$(vsearch --version 2>&1 | head -n 1 | sed 's/vsearch //g' | sed 's/,.*//g' | sed 's/^v//' | sed 's/_.*//')
END_VERSIONS
"""
}

modules/vsearch/usearchglobal/meta.yml (new file)
@@ -0,0 +1,83 @@
name: "vsearch_usearchglobal"
description: Compare target sequences to fasta-formatted query sequences using global pairwise alignment.
keywords:
- vsearch
- usearch
- alignment
- fasta
tools:
- "vsearch":
description: "VSEARCH is a versatile open-source tool for microbiome analysis, including chimera detection, clustering, dereplication and rereplication, extraction, FASTA/FASTQ/SFF file processing, masking, orienting, pair-wise alignment, restriction site cutting, searching, shuffling, sorting, subsampling, and taxonomic classification of amplicon sequences for metagenomics, genomics, and population genetics. (USEARCH alternative)"
homepage: "https://github.com/torognes/vsearch"
documentation: "None"
tool_dev_url: "https://github.com/torognes/vsearch"
doi: "doi: 10.7717/peerj.2584"
licence: "['GPL v3-or-later OR BSD-2-clause']"
input:
- meta:
type: map
description: Groovy Map containing sample information e.g. [ id:'test' ]
- queryfasta:
type: file
description: Query sequences in FASTA format
pattern: "*.{fasta,fa,fna,faa}"
- db:
type: file
description: Reference database file in FASTA or UDB format
pattern: "*"
- idcutoff:
type: real
description: Reject the sequence match if the pairwise identity is lower than the given id cutoff value (value ranging from 0.0 to 1.0 included)
- outoption:
type: string
description: Specify the type of output file to be generated by selecting one of the vsearch output file options
pattern: "alnout|biomout|blast6out|mothur_shared_out|otutabout|samout|uc|userout|lcaout"
- user_columns:
type: string
description: If using the `userout` option, specify which columns to include in output, with fields separated with `+` (e.g. query+target+id). See USEARCH manual for valid options. For other output options, use an empty string.
output:
- aln:
type: file
description: Results in pairwise alignment format
pattern: "*.{aln}"
- biom:
type: file
description: Results in an OTU table in the biom version 1.0 file format
pattern: "*.{biom}"
- lca:
type: file
description: Last common ancestor (LCA) information about the hits of each query in tab-separated format
pattern: "*.{lca}"
- mothur:
type: file
description: Results in an OTU table in the mothur shared tab-separated plain text file format
pattern: "*.{mothur}"
- otu:
type: file
description: Results in an OTU table in the classic tab-separated plain text format
pattern: "*.{otu}"
- sam:
type: file
description: Results written in sam format
pattern: "*.{sam}"
- tsv:
type: file
description: Results in tab-separated output, columns defined by user
pattern: "*.{tsv}"
- txt:
type: file
description: Tab delimited results in blast-like tabular format
pattern: "*.{txt}"
- uc:
type: file
description: Tab delimited results in a uclust-like format with 10 columns
pattern: "*.{uc}"
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
authors:
- "@jtangrot"
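
Editorial sketch, not part of the commit: a minimal call selecting the userout format together with user_columns, mirroring the test added later in this commit. File names and the include path are placeholders.

include { VSEARCH_USEARCHGLOBAL } from './modules/vsearch/usearchglobal/main.nf'

workflow {
    query = [ [ id:'sampleA' ], file('query.fasta') ]
    VSEARCH_USEARCHGLOBAL ( query, file('db.fasta'), 0.985, 'userout', 'query+target+id' )
    // results appear on the tsv channel; with outoption 'alnout' (or an unknown
    // value, which falls back to alnout) they appear on the aln channel instead
}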

tests/config/pytest_modules.yml
@@ -715,9 +715,9 @@ freebayes:
   - modules/freebayes/**
   - tests/modules/freebayes/**

-gamma:
-  - modules/gamma/**
-  - tests/modules/gamma/**
+gamma/gamma:
+  - modules/gamma/gamma/**
+  - tests/modules/gamma/gamma/**

 gatk4/applybqsr:
   - modules/gatk4/applybqsr/**
@@ -2056,6 +2056,10 @@ vcftools:
   - modules/vcftools/**
   - tests/modules/vcftools/**

+vsearch/usearchglobal:
+  - modules/vsearch/usearchglobal/**
+  - tests/modules/vsearch/usearchglobal/**
+
 yara/index:
   - modules/yara/index/**
   - tests/modules/yara/index/**

tests/modules/bcftools/concat/main.nf
@@ -4,13 +4,25 @@

 include { BCFTOOLS_CONCAT } from '../../../../modules/bcftools/concat/main.nf'

-workflow test_bcftools_concat {
+workflow test_bcftools_concat_tbi {

     input = [ [ id:'test3' ], // meta map
               [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
-                file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true) ]
+                file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true) ],
+              [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true),
+                file(params.test_data['sarscov2']['illumina']['test2_vcf_gz_tbi'], checkIfExists: true) ]
+            ]
+
+    BCFTOOLS_CONCAT ( input )
+}
+
+workflow test_bcftools_concat_no_tbi {
+
+    input = [ [ id:'test3' ], // meta map
+              [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true),
+                file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true) ],
+              []
             ]

     BCFTOOLS_CONCAT ( input )
 }

tests/modules/bcftools/concat/test.yml
@@ -1,8 +1,17 @@
-- name: bcftools concat test_bcftools_concat
-  command: nextflow run ./tests/modules/bcftools/concat -entry test_bcftools_concat -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config
+- name: bcftools concat test_bcftools_concat_tbi
+  command: nextflow run ./tests/modules/bcftools/concat -entry test_bcftools_concat_tbi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config
   tags:
-    - bcftools/concat
     - bcftools
+    - bcftools/concat
+  files:
+    - path: output/bcftools/test3.vcf.gz
+      md5sum: 35c88bfaad20101062e98beb217d7137
+
+- name: bcftools concat test_bcftools_concat_no_tbi
+  command: nextflow run ./tests/modules/bcftools/concat -entry test_bcftools_concat_no_tbi -c ./tests/config/nextflow.config -c ./tests/modules/bcftools/concat/nextflow.config
+  tags:
+    - bcftools
+    - bcftools/concat
   files:
     - path: output/bcftools/test3.vcf.gz
       md5sum: 35c88bfaad20101062e98beb217d7137

tests/modules/filtlong/nextflow.config
@@ -2,4 +2,7 @@ process {

     publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }

+    ext.args   = "--min_length 10"
+    ext.prefix = "test_lr"
+
 }

tests/modules/filtlong/test.yml
@@ -1,23 +1,26 @@
 - name: filtlong test_filtlong
   command: nextflow run ./tests/modules/filtlong -entry test_filtlong -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config
   tags:
     - filtlong
   files:
-    - path: output/filtlong/test_lr_filtlong.fastq.gz
-      md5sum: 7029066c27ac6f5ef18d660d5741979a
+    - path: output/filtlong/test_lr.fastq.gz
+      contains:
+        - "@00068f7a-51b3-4933-8fc6-7d6e29181ff9"

 - name: filtlong test_filtlong_illumina_se
   command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_se -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config
   tags:
     - filtlong
   files:
-    - path: output/filtlong/test_lr_filtlong.fastq.gz
-      md5sum: 7029066c27ac6f5ef18d660d5741979a
+    - path: output/filtlong/test_lr.fastq.gz
+      contains:
+        - "@00068f7a-51b3-4933-8fc6-7d6e29181ff9"

 - name: filtlong test_filtlong_illumina_pe
   command: nextflow run ./tests/modules/filtlong -entry test_filtlong_illumina_pe -c ./tests/config/nextflow.config -c ./tests/modules/filtlong/nextflow.config
   tags:
     - filtlong
   files:
-    - path: output/filtlong/test_lr_filtlong.fastq.gz
-      md5sum: 7029066c27ac6f5ef18d660d5741979a
+    - path: output/filtlong/test_lr.fastq.gz
+      contains:
+        - "@00068f7a-51b3-4933-8fc6-7d6e29181ff9"

tests/modules/gamma/gamma/main.nf (new file)
@@ -0,0 +1,29 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GAMMA_GAMMA } from '../../../../modules/gamma/gamma/main.nf'
workflow test_unzip {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['bacteroides_fragilis']['illumina']['test1_contigs_fa_gz'], checkIfExists: true),
]
db = [ file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/ResGANNCBI_20210507_srst2.fasta", checkIfExists: true), ]
GAMMA_GAMMA ( input, db )
}
workflow test_gamma {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
]
db = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ]
GAMMA_GAMMA ( input, db )
}

tests/modules/gamma/gamma/test.yml (new file)
@@ -0,0 +1,29 @@
- name: gamma gamma test_unzip
command: nextflow run tests/modules/gamma/gamma -entry test_unzip -c tests/config/nextflow.config
tags:
- gamma/gamma
- gamma
files:
- path: output/gamma/test.fasta
md5sum: 5b3b831d863fffaa3410a9ee7bfa12ce
- path: output/gamma/test.gamma
md5sum: 46165a89e10b7315d3a9b0aa6c561626
- path: output/gamma/test.psl
md5sum: f489ce4602ddbcb692d5781ee3fbf449
- path: output/gamma/versions.yml
md5sum: 8baafec7b3b87f788f69e30d317c9722
- name: gamma gamma test_gamma
command: nextflow run tests/modules/gamma/gamma -entry test_gamma -c tests/config/nextflow.config
tags:
- gamma/gamma
- gamma
files:
- path: output/gamma/test.fasta
md5sum: df37b48466181311e0a679f3c5878484
- path: output/gamma/test.gamma
md5sum: 3256708fa517a65ed01d99e0e3c762ae
- path: output/gamma/test.psl
md5sum: 162a2757ed3b167ae1e0cdb24213f940
- path: output/gamma/versions.yml
md5sum: b75c2871d8cac2f8ac67c0fbd22babd6

tests/modules/gamma/main.nf (deleted)
@@ -1,17 +0,0 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { GAMMA } from '../../../modules/gamma/main.nf'
workflow test_gamma {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
]
db = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ]
GAMMA ( input, db )
}

tests/modules/gamma/test.yml (deleted)
@@ -1,13 +0,0 @@
- name: gamma test_gamma
command: nextflow run tests/modules/gamma -entry test_gamma -c tests/config/nextflow.config
tags:
- gamma
files:
- path: output/gamma/test.fasta
md5sum: df37b48466181311e0a679f3c5878484
- path: output/gamma/test.gamma
md5sum: 3256708fa517a65ed01d99e0e3c762ae
- path: output/gamma/test.psl
md5sum: 162a2757ed3b167ae1e0cdb24213f940
- path: output/gamma/versions.yml
md5sum: 3fefb5b46c94993362243c5f9a472057

tests/modules/gatk4/mergevcfs/test.yml
@@ -6,6 +6,8 @@
   files:
     - path: output/gatk4/test.vcf.gz
       md5sum: 5b289bda88d3a3504f2e19ee8cff177c
+    - path: output/gatk4/test.vcf.gz.tbi
+      md5sum: a81673763b13086cfce9a23e72a35a16
     - path: output/gatk4/versions.yml

 - name: gatk4 mergevcfs test_gatk4_mergevcfs_no_dict

tests/modules/mash/screen/main.nf
@@ -14,8 +14,11 @@ workflow test_mash_screen {
             file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)
         ]
     ]
-    fastx_db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    sars_db = [
+        [ id: 'sars_db' ],
+        file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
+    ]

-    MASH_SKETCH ( input )
-    MASH_SCREEN ( MASH_SKETCH.out.mash, fastx_db )
+    MASH_SKETCH ( sars_db )
+    MASH_SCREEN ( input, MASH_SKETCH.out.mash.map { meta, sketch -> sketch } )
 }

tests/modules/mash/screen/test.yml
@@ -4,9 +4,9 @@
     - mash
     - mash/screen
   files:
-    - path: output/mash/test.mash_stats
-      md5sum: 2a6f297d8e69a5e4160243bc6c89129c
-    - path: output/mash/test.msh
-      md5sum: d747145a43dad5f82342036f8f5d9133
+    - path: output/mash/sars_db.mash_stats
+      md5sum: 1dafbd23e36e18bf4c87a007d0fc98f7
+    - path: output/mash/sars_db.msh
+      md5sum: 24289e4a13526e88eeb2abfca4a0f0a8
     - path: output/mash/test.screen
-      md5sum: d3c871dccd5cd57ab54781fa5c5d7278
+      md5sum: ac8701e1aab651b2f36c6380b1351b11

tests/modules/mosdepth/main.nf
@@ -2,72 +2,95 @@
 nextflow.enable.dsl = 2

 include { MOSDEPTH                      } from '../../../modules/mosdepth/main.nf'
 include { MOSDEPTH as MOSDEPTH_FAIL     } from '../../../modules/mosdepth/main.nf'
 include { MOSDEPTH as MOSDEPTH_WINDOW   } from '../../../modules/mosdepth/main.nf'
+include { MOSDEPTH as MOSDEPTH_THRESHOLD } from '../../../modules/mosdepth/main.nf'
+include { MOSDEPTH as MOSDEPTH_QUANTIZED } from '../../../modules/mosdepth/main.nf'

 workflow test_mosdepth {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]

     MOSDEPTH ( input, [], [] )
 }

 workflow test_mosdepth_bed {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH ( input, bed, [] )
 }

 workflow test_mosdepth_cram {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)
     ]
-    fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ]
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

     MOSDEPTH ( input, [], fasta )
 }

 workflow test_mosdepth_cram_bed {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
-    fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

     MOSDEPTH ( input, bed, fasta )
 }

 workflow test_mosdepth_window {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH_WINDOW ( input, [], [] )
 }

+workflow test_mosdepth_quantized {
+    input = [
+        [ id:'test', single_end:true ],
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+    ]
+
+    MOSDEPTH_QUANTIZED ( input, [], [] )
+}
+
+workflow test_mosdepth_thresholds {
+    input = [
+        [ id:'test', single_end:true ],
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+    ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)
+
+    MOSDEPTH_THRESHOLD ( input, bed, [] )
+}
+
 workflow test_mosdepth_fail {
     input = [
         [ id:'test', single_end:true ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ],
-        [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
     ]
-    bed = [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ]
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)

     MOSDEPTH_FAIL ( input, bed, [] )
 }

tests/modules/mosdepth/nextflow.config
@@ -7,4 +7,10 @@ process {
     withName: MOSDEPTH_WINDOW {
         ext.args = "--by 100"
     }
+    withName: MOSDEPTH_QUANTIZED {
+        ext.args = "--quantize 0:1:4:100:200"
+    }
+    withName: MOSDEPTH_THRESHOLD {
+        ext.args = "--thresholds 1,10,20,30"
+    }
 }

tests/modules/mosdepth/test.yml
@@ -86,6 +86,48 @@
     - path: output/mosdepth/test.regions.bed.gz.csi
       md5sum: 257d67678136963d9dd904330079609d

+- name: mosdepth test_mosdepth_quantized
+  command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_quantized -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
+  tags:
+    - mosdepth
+  files:
+    - path: output/mosdepth/test.mosdepth.global.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.summary.txt
+      md5sum: 4f0d231060cbde4efdd673863bd2fb59
+    - path: output/mosdepth/test.per-base.bed.gz
+      md5sum: bc1df47d46f818fee5275975925d769a
+    - path: output/mosdepth/test.per-base.bed.gz.csi
+      md5sum: 9e649ac749ff6c6073bef5ab63e8aaa4
+    - path: output/mosdepth/test.quantized.bed.gz
+      md5sum: 3e434a8bafcf59a67841ae3d4d752838
+    - path: output/mosdepth/test.quantized.bed.gz.csi
+      md5sum: be9617f551f19a33923f1e886eaefb93
+
+- name: mosdepth test_mosdepth_thresholds
+  command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_thresholds -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
+  tags:
+    - mosdepth
+  files:
+    - path: output/mosdepth/test.mosdepth.global.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.region.dist.txt
+      md5sum: e82e90c7d508a135b5a8a7cd6933452e
+    - path: output/mosdepth/test.mosdepth.summary.txt
+      md5sum: 96c037f769974b904beb53edc4f56d82
+    - path: output/mosdepth/test.per-base.bed.gz
+      md5sum: bc1df47d46f818fee5275975925d769a
+    - path: output/mosdepth/test.per-base.bed.gz.csi
+      md5sum: 9e649ac749ff6c6073bef5ab63e8aaa4
+    - path: output/mosdepth/test.regions.bed.gz
+      md5sum: 5d398caf7171ec4406278e2add3009ae
+    - path: output/mosdepth/test.regions.bed.gz.csi
+      md5sum: 47669cfe41f3e222e74d81e1b1be191f
+    - path: output/mosdepth/test.thresholds.bed.gz
+      md5sum: 13101e326eea3cbfa1d569b69f494f4c
+    - path: output/mosdepth/test.thresholds.bed.gz.csi
+      md5sum: 912055ee9452229439df6fae95644196
+
 - name: mosdepth test_mosdepth_fail
   command: nextflow run ./tests/modules/mosdepth -entry test_mosdepth_fail -c ./tests/config/nextflow.config -c ./tests/modules/mosdepth/nextflow.config
   tags:

tests/modules/star/align/test.yml
@@ -36,7 +36,7 @@
     - path: output/star/star/transcriptInfo.tab
      md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: b9f5e2f6a624b64c300fe25dc3ac801f
+      md5sum: 63de6af2210e138b49d7b4d570c6e67f
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -80,7 +80,7 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: 38d08f0b944a2a1b981a250d675aa0d9
+      md5sum: 7cdef439bc8092bfefb4d091bf8ee6ab
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -124,7 +124,7 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: c740d5177067c1fcc48ab7a16cd639d7
+      md5sum: 5dbc36fce7b72628c809bbc7d3d67973
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out
@@ -168,9 +168,9 @@
     - path: output/star/star/transcriptInfo.tab
       md5sum: 0c3a5adb49d15e5feff81db8e29f2e36
     - path: output/star/test.Aligned.out.bam
-      md5sum: a1bd1b40950a58ea2776908076160052
+      md5sum: d85858bf55a523121dde762046a34c5c
     - path: output/star/test.Chimeric.out.junction
-      md5sum: 327629eb54032212f29e1c32cbac6975
+      md5sum: ae87d1a24180f5a35cf6b47fdfdd0539
     - path: output/star/test.Log.final.out
     - path: output/star/test.Log.out
     - path: output/star/test.Log.progress.out

tests/modules/vsearch/usearchglobal/main.nf (new file)
@@ -0,0 +1,25 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { VSEARCH_USEARCHGLOBAL } from '../../../../modules/vsearch/usearchglobal/main.nf'
workflow test_vsearch_usearchglobal {
query = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true)
db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
idcutoff = 0.985
outoption = "xcfert" // Nonsense text to check default case.
columns = ""
VSEARCH_USEARCHGLOBAL ( [[id:'test'], query], db, idcutoff, outoption, columns )
}
workflow test_vsearch_usearchglobal_userout {
query = file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true)
db = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
idcutoff = 0.985
outoption = "userout"
columns = "query+target+id"
VSEARCH_USEARCHGLOBAL ( [[id:'test'], query], db, idcutoff, outoption, columns )
}

tests/modules/vsearch/usearchglobal/nextflow.config (new file)
@@ -0,0 +1,4 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

tests/modules/vsearch/usearchglobal/test.yml (new file)
@@ -0,0 +1,26 @@
- name: vsearch usearchglobal test_vsearch_usearchglobal
command: nextflow run ./tests/modules/vsearch/usearchglobal -entry test_vsearch_usearchglobal -c ./tests/config/nextflow.config -c ./tests/modules/vsearch/usearchglobal/nextflow.config
tags:
- vsearch/usearchglobal
- vsearch
files:
- path: output/vsearch/test.aln
contains:
- "vsearch --usearch_global transcriptome.fasta --db genome.fasta --id 0.985 --threads 2 --alnout test.aln"
- "Query >lcl|MT192765.1_cds_QIK50427.1_2"
- "%Id TLen Target"
- "100% 29829 MT192765.1"
- "Query 3822nt >lcl|MT192765.1_cds_QIK50427.1_2"
- "Target 29829nt >MT192765.1"
- "Qry 21249 + CAACAGAGTTGTTATTTCTAGTGATGTTCTTGTTAACAACTAA 21291"
- "Tgt 21506 + CAACAGAGTTGTTATTTCTAGTGATGTTCTTGTTAACAACTAA 21548"
- "21291 cols, 21290 ids (100.0%), 1 gaps (0.0%)"
- name: vsearch usearchglobal test_vsearch_usearchglobal_userout
command: nextflow run ./tests/modules/vsearch/usearchglobal -entry test_vsearch_usearchglobal_userout -c ./tests/config/nextflow.config -c ./tests/modules/vsearch/usearchglobal/nextflow.config
tags:
- vsearch/usearchglobal
- vsearch
files:
- path: output/vsearch/test.tsv
md5sum: b6cc50f7c8d18cb82e74dab70ed4baab