Mirror of https://github.com/MillironX/taxprofiler.git
Add prinseq as alternative complexity filtering
commit a384162810 (parent d3572e1878)

8 changed files with 189 additions and 20 deletions
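This commit wires PRINSEQ++ in as an alternative to BBDuk for short-read complexity filtering. Before the per-file diffs, the sketch below shows how a user might select the new filter from a custom Nextflow configuration, using only the parameters introduced or renamed in this commit; the file name prinseq.config and the idea of passing it with `-c` are illustrative, not part of the commit.

// prinseq.config -- hypothetical user config, passed to the pipeline with `-c prinseq.config`.
// Values mirror the defaults added to the pipeline's params block in this commit.
params {
    shortread_complexityfilter                           = true
    shortread_complexityfilter_tool                      = 'prinseqplusplus'

    // 'entropy' (default) or 'dust'; each mode reads its own threshold parameter.
    shortread_complexityfilter_prinseqplusplus_mode      = 'entropy'
    shortread_complexityfilter_entropy                   = 0.3
    shortread_complexityfilter_prinseqplusplus_dustscore = 0.5
}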
CITATIONS.md (12 changed lines)

@@ -18,23 +18,27 @@

 - [fastp](https://doi.org/10.1093/bioinformatics/bty560)

-  > Chen, Shifu, Yanqing Zhou, Yaru Chen, and Jia Gu. 2018. “Fastp: An Ultra-Fast All-in-One FASTQ Preprocessor.” Bioinformatics 34 (17): i884-90. 10.1093/bioinformatics/bty560.
+  > Chen, Shifu, Yanqing Zhou, Yaru Chen, and Jia Gu. 2018. Fastp: An Ultra-Fast All-in-One FASTQ Preprocessor. Bioinformatics 34 (17): i884-90. 10.1093/bioinformatics/bty560.

 - [AdapterRemoval2](https://doi.org/10.1186/s13104-016-1900-2)

-  > Schubert, Mikkel, Stinus Lindgreen, and Ludovic Orlando. 2016. “AdapterRemoval v2: Rapid Adapter Trimming, Identification, and Read Merging.” BMC Research Notes 9 (February): 88. doi: 10.1186/s13104-016-1900-2.
+  > Schubert, Mikkel, Stinus Lindgreen, and Ludovic Orlando. 2016. AdapterRemoval v2: Rapid Adapter Trimming, Identification, and Read Merging. BMC Research Notes 9 (February): 88. doi: 10.1186/s13104-016-1900-2.

 - [Porechop](https://github.com/rrwick/Porechop)

 - [BBTools](http://sourceforge.net/projects/bbmap/)

+- [PRINSEQ++](https://doi.org/10.7287/peerj.preprints.27553v1)
+
+  > Cantu, Vito Adrian, Jeffrey Sadural, and Robert Edwards. 2019. PRINSEQ++, a Multi-Threaded Tool for Fast and Efficient Quality Control and Preprocessing of Sequencing Datasets. e27553v1. PeerJ Preprints. doi: 10.7287/peerj.preprints.27553v1.
+
 - [Kraken2](https://doi.org/10.1186/s13059-019-1891-0)

-  > Wood, Derrick E., Jennifer Lu, and Ben Langmead. 2019. “Improved Metagenomic Analysis with Kraken 2.” Genome Biology 20 (1): 257. doi: 10.1186/s13059-019-1891-0.
+  > Wood, Derrick E., Jennifer Lu, and Ben Langmead. 2019. Improved Metagenomic Analysis with Kraken 2. Genome Biology 20 (1): 257. doi: 10.1186/s13059-019-1891-0.

 - [MALT](https://doi.org/10.1038/s41559-017-0446-6)

-  > Vågene, Åshild J., Alexander Herbig, Michael G. Campana, Nelly M. Robles García, Christina Warinner, Susanna Sabin, Maria A. Spyrou, et al. 2018. “Salmonella Enterica Genomes from Victims of a Major Sixteenth-Century Epidemic in Mexico.” Nature Ecology & Evolution 2 (3): 520-28. doi: 10.1038/s41559-017-0446-6.
+  > Vågene, Åshild J., Alexander Herbig, Michael G. Campana, Nelly M. Robles García, Christina Warinner, Susanna Sabin, Maria A. Spyrou, et al. 2018. Salmonella Enterica Genomes from Victims of a Major Sixteenth-Century Epidemic in Mexico. Nature Ecology & Evolution 2 (3): 520-28. doi: 10.1038/s41559-017-0446-6.

 ## Software packaging/containerisation tools
@@ -143,7 +143,7 @@ process {
     withName: BBMAP_BBDUK {
         ext.args = [
-            "entropy=${params.shortread_complexityfilter_bbduk_entropy}",
+            "entropy=${params.shortread_complexityfilter_entropy}",
             "entropywindow=${params.shortread_complexityfilter_bbduk_windowsize}",
             params.shortread_complexityfilter_bbduk_mask ? "entropymask=t" : "entropymask=f"
         ].join(' ').trim()
@@ -155,6 +155,19 @@ process {
         ]
     }

+    withName: PRINSEQPLUSPLUS {
+        ext.args = [
+            params.shortread_complexityfilter_prinseqplusplus_mode == 'dust' ? "-lc_dust=${params.shortread_complexityfilter_prinseqplusplus_dustscore}" : "-lc_entropy=${params.shortread_complexityfilter_entropy}",
+            "-trim_qual_left=0 -trim_qual_left=0 -trim_qual_window=0 -trim_qual_step=0"
+        ].join(' ').trim()
+        ext.prefix = { "${meta.id}-${meta.run_accession}" }
+        publishDir = [
+            path: { "${params.outdir}/prinseqplusplus/" },
+            mode: params.publish_dir_mode,
+            pattern: '*{_good_out.fastq.gz,_good_out_R1.fastq.gz,_good_out_R2.fastq.gz,log}'
+        ]
+    }
+
     withName: MALT_RUN {
         ext.args = { "${meta.db_params}" }
         ext.prefix = { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
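The PRINSEQ++ ext.args above pick a low-complexity flag based on the selected mode (dust score versus entropy threshold) and set the quality-trimming options to 0, apparently so the module only performs complexity filtering. A minimal plain-Groovy sketch of how that ternary resolves — not pipeline code; the params map is mocked with the defaults introduced in this commit:

// Mock of the pipeline params used by the PRINSEQPLUSPLUS ext.args above.
def params = [
    shortread_complexityfilter_prinseqplusplus_mode     : 'entropy',
    shortread_complexityfilter_prinseqplusplus_dustscore: 0.5,
    shortread_complexityfilter_entropy                  : 0.3,
]

// Same expression as in the process configuration hunk.
// NB: '-trim_qual_left=0' appears twice, verbatim from the commit.
def args = [
    params.shortread_complexityfilter_prinseqplusplus_mode == 'dust'
        ? "-lc_dust=${params.shortread_complexityfilter_prinseqplusplus_dustscore}"
        : "-lc_entropy=${params.shortread_complexityfilter_entropy}",
    "-trim_qual_left=0 -trim_qual_left=0 -trim_qual_window=0 -trim_qual_step=0"
].join(' ').trim()

assert args == '-lc_entropy=0.3 -trim_qual_left=0 -trim_qual_left=0 -trim_qual_window=0 -trim_qual_step=0'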
@@ -33,6 +33,9 @@
         "porechop": {
             "git_sha": "e20e57f90b6787ac9a010a980cf6ea98bd990046"
         },
+        "prinseqplusplus": {
+            "git_sha": "f1c5384c31e985591716afdd732cf8c2ae29d05b"
+        },
         "untar": {
             "git_sha": "e080f4c8acf5760039ed12ec1f206170f3f9a918"
         }
modules/nf-core/modules/prinseqplusplus/main.nf (generated, new file, 61 lines)
process PRINSEQPLUSPLUS {
    tag "$meta.id"
    label 'process_low'

    conda (params.enable_conda ? "bioconda::prinseq-plus-plus=1.2.3" : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/prinseq-plus-plus:1.2.3--hc90279e_1':
        'quay.io/biocontainers/prinseq-plus-plus:1.2.3--hc90279e_1' }"

    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*_good_out*.fastq.gz")  , emit: good_reads
    tuple val(meta), path("*_single_out*.fastq.gz"), optional: true, emit: single_reads
    tuple val(meta), path("*_bad_out*.fastq.gz")   , optional: true, emit: bad_reads
    tuple val(meta), path("*.log")                 , emit: log
    path "versions.yml"                            , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    def prefix = task.ext.prefix ?: "${meta.id}"

    if (meta.single_end) {
        """
        prinseq++ \\
            -threads $task.cpus \\
            -fastq ${reads} \\
            -out_name ${prefix} \\
            -out_gz \\
            -VERBOSE 1 \\
            $args \\
            | tee ${prefix}.log

        cat <<-END_VERSIONS > versions.yml
        "${task.process}":
            prinseqplusplus: \$(echo \$(prinseq++ --version | cut -f 2 -d ' ' ))
        END_VERSIONS
        """
    } else {
        """
        prinseq++ \\
            -threads $task.cpus \\
            -fastq ${reads[0]} \\
            -fastq2 ${reads[1]} \\
            -out_name ${prefix} \\
            -out_gz \\
            -VERBOSE 1 \\
            $args \\
            | tee ${prefix}.log

        cat <<-END_VERSIONS > versions.yml
        "${task.process}":
            prinseqplusplus: \$(echo \$(prinseq++ --version | cut -f 2 -d ' ' ))
        END_VERSIONS
        """
    }
}
modules/nf-core/modules/prinseqplusplus/meta.yml (generated, new file, 60 lines)
name: "prinseqplusplus"
description: PRINSEQ++ is a C++ implementation of the prinseq-lite.pl program. It can be used to filter, reformat or trim genomic and metagenomic sequence data
keywords:
  - fastq
  - fasta
  - filter
  - trim
tools:
  - "prinseqplusplus":
      description: "PRINSEQ++ - Multi-threaded C++ sequence cleaning"
      homepage: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus"
      documentation: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus"
      tool_dev_url: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus"
      doi: "10.7287/peerj.preprints.27553v1"
      licence: "['GPL v2']"

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - reads:
      type: file
      description: |
        List of input FastQ files of size 1 and 2 for single-end and paired-end
        data, respectively.

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - good_reads:
      type: file
      description: Reads passing filter(s) in gzipped FASTQ format
      pattern: "*_good_out_{R1,R2}.fastq.gz"
  - single_reads:
      type: file
      description: |
        Single reads whose pair did not pass filter(s), in gzipped FASTQ format
      pattern: "*_single_out_{R1,R2}.fastq.gz"
  - bad_reads:
      type: file
      description: |
        Reads not passing filter(s) in gzipped FASTQ format
      pattern: "*_bad_out_{R1,R2}.fastq.gz"
  - log:
      type: file
      description: |
        Verbose level 2 STDOUT information in a log file
      pattern: "*.log"

authors:
  - "@jfy133"
@@ -66,11 +66,14 @@ params {
     longread_clip = false

     // Complexity filtering
     shortread_complexityfilter = false
     shortread_complexityfilter_tool = 'bbduk'
-    shortread_complexityfilter_bbduk_entropy = 0.3
+    shortread_complexityfilter_entropy = 0.3
     shortread_complexityfilter_bbduk_windowsize = 50
     shortread_complexityfilter_bbduk_mask = false
+    shortread_complexityfilter_prinseqplusplus_mode = 'entropy'
+    shortread_complexityfilter_prinseqplusplus_dustscore = 0.5
+

     // MALT
     run_malt = false
@@ -10,7 +10,10 @@
             "type": "object",
             "fa_icon": "fas fa-terminal",
             "description": "Define where the pipeline should find input data and save output data.",
-            "required": ["input", "outdir"],
+            "required": [
+                "input",
+                "outdir"
+            ],
             "properties": {
                 "input": {
                     "type": "string",
@@ -173,7 +176,14 @@
                 "description": "Method used to save pipeline results to output directory.",
                 "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
                 "fa_icon": "fas fa-copy",
-                "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
+                "enum": [
+                    "symlink",
+                    "rellink",
+                    "link",
+                    "copy",
+                    "copyNoFollow",
+                    "move"
+                ],
                 "hidden": true
             },
             "email_on_fail": {
@@ -284,7 +294,10 @@
         "shortread_clipmerge_tool": {
             "type": "string",
             "default": "fastp",
-            "enum": ["fastp", "adapterremoval"]
+            "enum": [
+                "fastp",
+                "adapterremoval"
+            ]
         },
         "shortread_clipmerge_skipadaptertrim": {
             "type": "boolean"
@@ -308,10 +321,6 @@
            "type": "string",
            "default": "bbduk"
        },
-        "shortread_complexityfilter_bbduk_entropy": {
-            "type": "number",
-            "default": 0.3
-        },
        "shortread_complexityfilter_bbduk_windowsize": {
            "type": "integer",
            "default": 50
@@ -321,6 +330,18 @@
        },
        "shortread_complexityfilter": {
            "type": "boolean"
+        },
+        "shortread_complexityfilter_entropy": {
+            "type": "number",
+            "default": 0.3
+        },
+        "shortread_complexityfilter_prinseqplusplus_mode": {
+            "type": "string",
+            "default": "entropy"
+        },
+        "shortread_complexityfilter_prinseqplusplus_dustscore": {
+            "type": "number",
+            "default": 0.5
        }
    }
}
@@ -2,7 +2,8 @@
 // Check input samplesheet and get read channels
 //

 include { BBMAP_BBDUK } from '../../modules/nf-core/modules/bbmap/bbduk/main'
+include { PRINSEQPLUSPLUS } from '../../modules/nf-core/modules/prinseqplusplus/main'

 workflow SHORTREAD_COMPLEXITYFILTERING {
     take:
@@ -16,6 +17,9 @@ workflow SHORTREAD_COMPLEXITYFILTERING {
         ch_filtered_reads = BBMAP_BBDUK ( reads, [] ).reads
         ch_versions = ch_versions.mix( BBMAP_BBDUK.out.versions.first() )
         ch_multiqc_files = ch_multiqc_files.mix( BBMAP_BBDUK.out.log )
+    } else if ( params.shortread_complexityfilter_tool == 'prinseqplusplus' ) {
+        ch_filtered_reads = PRINSEQPLUSPLUS ( reads ).good_reads
+        ch_versions = ch_versions.mix( PRINSEQPLUSPLUS.out.versions.first() )
     } else {
         ch_filtered_reads = reads
     }
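With the branch added above, the subworkflow dispatches on params.shortread_complexityfilter_tool and passes PRINSEQ++'s good_reads channel downstream in place of the BBDuk output. A hypothetical caller is sketched below; it is not part of this commit, and the include path, glob pattern, and the assumption that the subworkflow takes a single channel of [ meta, reads ] tuples are all inferred rather than confirmed by the diff.

// Hypothetical mini main.nf exercising the subworkflow (sketch, not pipeline code).
include { SHORTREAD_COMPLEXITYFILTERING } from './subworkflows/local/shortread_complexityfiltering'

workflow {
    // Build [ meta, reads ] tuples in the shape the nf-core modules expect.
    ch_reads = Channel
        .fromFilePairs('fastq/*_{1,2}.fastq.gz')
        .map { id, files -> [ [ id: id, run_accession: 'run1', single_end: false ], files ] }

    SHORTREAD_COMPLEXITYFILTERING ( ch_reads )
}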