
Add short-read host removal via BowTie2 alignment mapping

Commit fc8a83245a by James A. Fellows Yates, 2022-04-13 15:29:16 +02:00 (committed by GitHub)
12 changed files with 294 additions and 4 deletions
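
The new step is opt-in: it only runs when --perform_shortread_hostremoval is supplied together with a host reference FASTA. A minimal sketch of an invocation (profile, sample sheet, database sheet, and reference path are placeholders, not taken from this commit):

nextflow run nf-core/taxprofiler \
    -profile docker \
    --input samplesheet.csv \
    --databases databases.csv \
    --outdir ./results \
    --perform_shortread_hostremoval \
    --shortread_hostremoval_reference host_genome.fasta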


@@ -41,6 +41,7 @@ jobs:
          - "--shortread_complexityfilter_tool prinseq"
          - "--perform_runmerging"
          - "--perform_runmerging --shortread_clipmerge_mergepairs"
          - "--shortread_complexityfilter false --perform_shortread_hostremoval"
    steps:
      - name: Check out pipeline code
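
The added matrix entry exercises host removal with the complexity filter switched off. Locally, the same combination should be reproducible with something like the following (assuming the CI test profile; exact flags depend on the workflow file, which is only partially shown here):

nextflow run . -profile test,docker --outdir ./results \
    --shortread_complexityfilter false --perform_shortread_hostremoval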


@@ -132,6 +132,24 @@ process {
        ]
    }

    withName: BOWTIE2_BUILD {
        ext.prefix = { "${meta.id}_${meta.run_accession}" }
        publishDir = [
            path: { "${params.outdir}/bowtie2/build" },
            mode: params.publish_dir_mode,
            pattern: '*.bt2'
        ]
    }

    withName: BOWTIE2_ALIGN {
        ext.prefix = { "${meta.id}_${meta.run_accession}" }
        publishDir = [
            path: { "${params.outdir}/bowtie2/align" },
            mode: params.publish_dir_mode,
            pattern: '*.{fastq.gz,bam}'
        ]
    }

    withName: BBMAP_BBDUK {
        ext.args = [
            "entropy=${params.shortread_complexityfilter_entropy}",


@@ -31,4 +31,6 @@ params {
    perform_shortread_clipmerge = true
    perform_longread_clip = false
    perform_shortread_complexityfilter = true
    perform_shortread_hostremoval = true
    shortread_hostremoval_reference = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.fasta'
}


@@ -9,6 +9,12 @@
        "bbmap/bbduk": {
            "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
        },
        "bowtie2/align": {
            "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
        },
        "bowtie2/build": {
            "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
        },
        "cat/fastq": {
            "git_sha": "e745e167c1020928ef20ea1397b6b4d230681b4d"
        },
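
The two new entries mirror what the nf-core tooling records when the modules are pulled in, roughly equivalent to running the following from the pipeline root (assuming the nf-core/tools CLI; the pinned git_sha depends on when it is run):

nf-core modules install bowtie2/build
nf-core modules install bowtie2/align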


@@ -0,0 +1,77 @@
process BOWTIE2_ALIGN {
    tag "$meta.id"
    label 'process_high'

    conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4 bioconda::samtools=1.14 conda-forge::pigz=2.6' : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:4d235f41348a00533f18e47c9669f1ecb327f629-0' :
        'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:4d235f41348a00533f18e47c9669f1ecb327f629-0' }"

    input:
    tuple val(meta), path(reads)
    path index
    val save_unaligned

    output:
    tuple val(meta), path('*.bam')    , emit: bam
    tuple val(meta), path('*.log')    , emit: log
    tuple val(meta), path('*fastq.gz'), emit: fastq, optional:true
    path "versions.yml"               , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    def args2 = task.ext.args2 ?: ''
    def prefix = task.ext.prefix ?: "${meta.id}"
    if (meta.single_end) {
        def unaligned = save_unaligned ? "--un-gz ${prefix}.unmapped.fastq.gz" : ''
        """
        INDEX=`find -L ./ -name "*.rev.1.bt2" | sed 's/.rev.1.bt2//'`
        bowtie2 \\
            -x \$INDEX \\
            -U $reads \\
            --threads $task.cpus \\
            $unaligned \\
            $args \\
            2> ${prefix}.bowtie2.log \\
            | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam -

        cat <<-END_VERSIONS > versions.yml
        "${task.process}":
            bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//')
            samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
            pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' )
        END_VERSIONS
        """
    } else {
        def unaligned = save_unaligned ? "--un-conc-gz ${prefix}.unmapped.fastq.gz" : ''
        """
        INDEX=`find -L ./ -name "*.rev.1.bt2" | sed 's/.rev.1.bt2//'`
        bowtie2 \\
            -x \$INDEX \\
            -1 ${reads[0]} \\
            -2 ${reads[1]} \\
            --threads $task.cpus \\
            $unaligned \\
            $args \\
            2> ${prefix}.bowtie2.log \\
            | samtools view -@ $task.cpus $args2 -bhS -o ${prefix}.bam -

        if [ -f ${prefix}.unmapped.fastq.1.gz ]; then
            mv ${prefix}.unmapped.fastq.1.gz ${prefix}.unmapped_1.fastq.gz
        fi
        if [ -f ${prefix}.unmapped.fastq.2.gz ]; then
            mv ${prefix}.unmapped.fastq.2.gz ${prefix}.unmapped_2.fastq.gz
        fi

        cat <<-END_VERSIONS > versions.yml
        "${task.process}":
            bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//')
            samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
            pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' )
        END_VERSIONS
        """
    }
}
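
Stripped of the Nextflow templating, the paired-end branch boils down to one bowtie2-to-samtools pipe. Run by hand it would look roughly like this (file names, prefix, and thread count are placeholders):

# derive the index basename from the .bt2 files, as the module does
INDEX=`find -L ./ -name "*.rev.1.bt2" | sed 's/.rev.1.bt2//'`
bowtie2 -x $INDEX -1 sample_R1.fastq.gz -2 sample_R2.fastq.gz \
    --threads 4 --un-conc-gz sample.unmapped.fastq.gz \
    2> sample.bowtie2.log \
    | samtools view -@ 4 -bhS -o sample.bam -
# --un-conc-gz writes pairs that do not align concordantly (the non-host reads here)
# to sample.unmapped.fastq.1.gz and sample.unmapped.fastq.2.gz, which the module then renames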


@@ -0,0 +1,51 @@
name: bowtie2_align
description: Align reads to a reference genome using bowtie2
keywords:
  - align
  - fasta
  - genome
  - reference
tools:
  - bowtie2:
      description: |
        Bowtie 2 is an ultrafast and memory-efficient tool for aligning
        sequencing reads to long reference sequences.
      homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml
      documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml
      doi: 10.1038/nmeth.1923
      licence: ["GPL-3.0-or-later"]
input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - reads:
      type: file
      description: |
        List of input FastQ files of size 1 and 2 for single-end and paired-end data,
        respectively.
  - index:
      type: file
      description: Bowtie2 genome index files
      pattern: "*.ebwt"
output:
  - bam:
      type: file
      description: Output BAM file containing read alignments
      pattern: "*.{bam}"
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - fastq:
      type: file
      description: Unaligned FastQ files
      pattern: "*.fastq.gz"
  - log:
      type: file
      description: Alignment log
      pattern: "*.log"
authors:
  - "@joseespinosa"
  - "@drpatelh"


@@ -0,0 +1,30 @@
process BOWTIE2_BUILD {
    tag "$fasta"
    label 'process_high'

    conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4' : null)
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/bowtie2:2.4.4--py39hbb4e92a_0' :
        'quay.io/biocontainers/bowtie2:2.4.4--py39hbb4e92a_0' }"

    input:
    path fasta

    output:
    path 'bowtie2'      , emit: index
    path "versions.yml" , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    """
    mkdir bowtie2
    bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName}

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//')
    END_VERSIONS
    """
}
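
The build module is a thin wrapper: it creates an output directory and delegates to bowtie2-build. The stand-alone equivalent would be roughly (FASTA name and thread count are placeholders):

mkdir bowtie2
bowtie2-build --threads 4 host_genome.fasta bowtie2/host_genome
# produces bowtie2/host_genome.{1,2,3,4}.bt2 plus bowtie2/host_genome.rev.{1,2}.bt2,
# which BOWTIE2_ALIGN later locates with its find/sed lookup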


@@ -0,0 +1,33 @@
name: bowtie2_build
description: Builds bowtie index for reference genome
keywords:
  - build
  - index
  - fasta
  - genome
  - reference
tools:
  - bowtie2:
      description: |
        Bowtie 2 is an ultrafast and memory-efficient tool for aligning
        sequencing reads to long reference sequences.
      homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml
      documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml
      doi: 10.1038/nmeth.1923
      licence: ["GPL-3.0-or-later"]
input:
  - fasta:
      type: file
      description: Input genome fasta file
output:
  - index:
      type: file
      description: Bowtie2 genome index files
      pattern: "*.bt2"
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
authors:
  - "@joseespinosa"
  - "@drpatelh"


@@ -80,6 +80,11 @@ params {
    perform_runmerging = false
    save_runmerged_reads = false

    // Host Removal
    perform_shortread_hostremoval = false
    shortread_hostremoval_reference = null
    shortread_hostremoval_index = null

    // MALT
    run_malt = false
    malt_mode = 'BlastN'


@@ -358,6 +358,17 @@
            },
            "perform_runmerging": {
                "type": "boolean"
            },
            "perform_shortread_hostremoval": {
                "type": "boolean"
            },
            "shortread_hostremoval_reference": {
                "type": "string",
                "default": null
            },
            "shortread_hostremoval_index": {
                "type": "string",
                "default": null
            }
        }
    }
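
The same three options can also be set through a parameters file instead of command-line flags; a minimal sketch (file name and paths are placeholders):

# params.yaml, passed with -params-file params.yaml
perform_shortread_hostremoval: true
shortread_hostremoval_reference: "/path/to/host_genome.fasta"
shortread_hostremoval_index: null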


@@ -0,0 +1,34 @@
//
// Remove host reads via alignment and export off-target reads
//

include { BOWTIE2_BUILD } from '../../modules/nf-core/modules/bowtie2/build/main'
include { BOWTIE2_ALIGN } from '../../modules/nf-core/modules/bowtie2/align/main'

workflow SHORTREAD_HOSTREMOVAL {
    take:
    reads     // [ [ meta ], [ reads ] ]
    reference // /path/to/fasta
    index     // /path/to/index

    main:
    ch_versions      = Channel.empty()
    ch_multiqc_files = Channel.empty()

    if ( !params.shortread_hostremoval_index ) {
        ch_bowtie2_index = BOWTIE2_BUILD ( reference ).index
        ch_versions      = ch_versions.mix( BOWTIE2_BUILD.out.versions )
    } else {
        ch_bowtie2_index = index.first()
    }

    BOWTIE2_ALIGN ( reads, ch_bowtie2_index, true )
    ch_versions      = ch_versions.mix( BOWTIE2_ALIGN.out.versions.first() )
    ch_multiqc_files = ch_multiqc_files.mix( BOWTIE2_ALIGN.out.log )

    emit:
    reads    = BOWTIE2_ALIGN.out.fastq // channel: [ val(meta), [ reads ] ]
    versions = ch_versions             // channel: [ versions.yml ]
    mqc      = ch_multiqc_files
}
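
Since BOWTIE2_BUILD is only invoked when no index parameter is set, a pre-built index directory can be reused across runs. A sketch of the relevant flags (the directory is a placeholder and should contain the .bt2 files; the reference FASTA is still expected by the parameter checks in the main workflow):

--perform_shortread_hostremoval \
--shortread_hostremoval_reference host_genome.fasta \
--shortread_hostremoval_index ./bowtie2_host_index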


@@ -11,7 +11,9 @@ WorkflowTaxprofiler.initialise(params, log)
// TODO nf-core: Add all file path parameters for the pipeline to the list below
// Check input path parameters to see if they exist
def checkPathParamList = [ params.input, params.databases, params.shortread_hostremoval_reference,
                           params.shortread_hostremoval_index, params.multiqc_config
                         ]
for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } }

// Check mandatory parameters
@@ -20,6 +22,12 @@ if (params.databases) { ch_databases = file(params.databases) } else { exit 1, '
if (params.shortread_clipmerge_mergepairs && params.run_malt ) log.warn "[nf-core/taxprofiler] warning: MALT does not accept uncollapsed paired-reads. Pairs will be profiled as separate files."
if (params.shortread_clipmerge_excludeunmerged && !params.shortread_clipmerge_mergepairs) exit 1, "[nf-core/taxprofiler] error: cannot include unmerged reads when merging not turned on. Please specify --shortread_clipmerge_mergepairs"

if (params.perform_shortread_hostremoval && !params.shortread_hostremoval_reference) { exit 1, "[nf-core/taxprofiler] error: --perform_shortread_hostremoval requested but no --shortread_hostremoval_reference FASTA supplied. Check input." }
if (!params.shortread_hostremoval_reference && params.shortread_hostremoval_index) { exit 1, "[nf-core/taxprofiler] error: --shortread_hostremoval_index provided but no --shortread_hostremoval_reference FASTA supplied. Check input." }

if (params.shortread_hostremoval_reference) { ch_reference = file(params.shortread_hostremoval_reference) }
if (params.shortread_hostremoval_index)     { ch_reference_index = file(params.shortread_hostremoval_index) } else { ch_reference_index = [] }
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    CONFIG FILES
@@ -43,6 +51,7 @@ include { INPUT_CHECK } from '../subworkflows/local/input_check'
include { DB_CHECK                      } from '../subworkflows/local/db_check'
include { SHORTREAD_PREPROCESSING       } from '../subworkflows/local/shortread_preprocessing'
include { LONGREAD_PREPROCESSING        } from '../subworkflows/local/longread_preprocessing'
include { SHORTREAD_HOSTREMOVAL         } from '../subworkflows/local/shortread_hostremoval'
include { SHORTREAD_COMPLEXITYFILTERING } from '../subworkflows/local/shortread_complexityfiltering'
include { PROFILING                     } from '../subworkflows/local/profiling'
@@ -111,6 +120,7 @@ workflow TAXPROFILER {
    if ( params.perform_longread_clip ) {
        ch_longreads_preprocessed = LONGREAD_PREPROCESSING ( INPUT_CHECK.out.nanopore ).reads
            .map { it -> [ it[0], [it[1]] ] }
        ch_versions = ch_versions.mix(LONGREAD_PREPROCESSING.out.versions.first())
    } else {
        ch_longreads_preprocessed = INPUT_CHECK.out.nanopore
    }
@@ -126,12 +136,19 @@ workflow TAXPROFILER {
    }

    /*
        SUBWORKFLOW: HOST REMOVAL
    */

    if ( params.perform_shortread_hostremoval ) {
        ch_shortreads_hostremoved = SHORTREAD_HOSTREMOVAL ( ch_shortreads_filtered, ch_reference, ch_reference_index ).reads
        ch_versions = ch_versions.mix(SHORTREAD_HOSTREMOVAL.out.versions.first())
    } else {
        ch_shortreads_hostremoved = ch_shortreads_filtered
    }

    if ( params.perform_runmerging ) {
        ch_reads_for_cat_branch = ch_shortreads_hostremoved
            .mix( ch_longreads_preprocessed )
            .map {
                meta, reads ->
@@ -160,7 +177,7 @@ workflow TAXPROFILER {
        }

    } else {
        ch_reads_runmerged = ch_shortreads_hostremoved
            .mix( ch_longreads_preprocessed )
    }
@@ -205,6 +222,11 @@ workflow TAXPROFILER {
        ch_versions = ch_versions.mix( SHORTREAD_COMPLEXITYFILTERING.out.versions )
    }

    if (params.perform_shortread_hostremoval) {
        ch_multiqc_files = ch_multiqc_files.mix(SHORTREAD_HOSTREMOVAL.out.mqc.collect{it[1]}.ifEmpty([]))
        ch_versions = ch_versions.mix(SHORTREAD_HOSTREMOVAL.out.versions)
    }

    if (params.perform_runmerging){
        ch_versions = ch_versions.mix(CAT_FASTQ.out.versions)
    }