mirror of
https://github.com/MillironX/nf-core_modules.git
synced 2024-11-10 20:23:10 +00:00
module: picard filtersamreads (#602)
* Start work filtersamreads * Refactored to allow optional input * Use proper readlist test data * Remove typo * Fix if else condition * Remove debugging code * Fix container URLs * Add required input specification meta * Cleanup * Apply suggestions from code review Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com> * Fix suffixing * Additional formatting tweaks * Update modules/picard/filtersamreads/main.nf Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com> * Update modules/picard/filtersamreads/meta.yml Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com>
This commit is contained in:
parent
2d26b037a1
commit
3cc43838e1
7 changed files with 236 additions and 1 deletions
68
modules/picard/filtersamreads/functions.nf
Normal file
68
modules/picard/filtersamreads/functions.nf
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
//
// Derive the tool name from $task.process.
// A fully-qualified process name looks like 'WORKFLOW:SUBWORKFLOW:TOOL_SUBTOOL';
// keep the final segment, then the part before the first underscore, lower-cased.
//
def getSoftwareName(task_process) {
    def process_name = task_process.tokenize(':').last()
    return process_name.tokenize('_').first().toLowerCase()
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
//
// Build the effective options map for an nf-core module, falling back to
// defaults for anything the caller did not supply.
//
// NOTE: publish_files intentionally has no default — null (publish everything)
// must stay distinct from an empty map (publish nothing).
//
def initOptions(Map args) {
    return [
        args           : args.args ?: '',
        args2          : args.args2 ?: '',
        args3          : args.args3 ?: '',
        publish_by_meta: args.publish_by_meta ?: [],
        publish_dir    : args.publish_dir ?: '',
        publish_files  : args.publish_files,
        suffix         : args.suffix ?: '',
    ]
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
//
// Join the non-empty elements of a list into a '/'-separated path string,
// trimming whitespace and any leading/trailing slashes from each element.
//
def getPathFromList(path_list) {
    return path_list
        .findAll { element -> !element?.trim().isEmpty() }   // drop null/blank entries
        .collect { element -> element.trim().replaceAll('^/+|/+$', '') }
        .join('/')
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
//
// Compute the publish path for a module output file, or null to skip it.
//
// args.filename        : name of the file being published
// args.options         : raw module options (passed through initOptions)
// args.publish_dir     : fallback directory when options define none
// args.meta            : sample meta map (used with publish_by_meta)
// args.publish_by_meta : fallback key list when options' value is not a List
//
def saveFiles(Map args) {
    // Version files are never published; returning null tells publishDir's
    // saveAs closure to skip the file.
    if (args.filename.endsWith('.version.txt')) {
        return null
    }

    def opts          = initOptions(args.options)
    def path_segments = [ opts.publish_dir ?: args.publish_dir ]

    // Optionally nest outputs under per-sample subdirectories derived from meta.
    if (opts.publish_by_meta) {
        def keys = opts.publish_by_meta instanceof List ? opts.publish_by_meta : args.publish_by_meta
        keys.each { key ->
            if (args.meta && key instanceof String) {
                def segment = key
                if (args.meta.containsKey(key)) {
                    // Booleans become 'key_value' so true/false stay distinguishable.
                    segment = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
                }
                segment = segment instanceof String ? segment : ''
                path_segments.add(segment)
            }
        }
    }

    if (opts.publish_files instanceof Map) {
        // Publish only files matching a configured extension, appending the
        // extension's mapped subdirectory. Plain 'for' so 'return' exits the method.
        for (ext in opts.publish_files) {
            if (args.filename.endsWith(ext.key)) {
                def with_ext = path_segments.collect()
                with_ext.add(ext.value)
                return "${getPathFromList(with_ext)}/$args.filename"
            }
        }
    } else if (opts.publish_files == null) {
        // No filter configured: publish everything.
        return "${getPathFromList(path_segments)}/$args.filename"
    }
}
|
65
modules/picard/filtersamreads/main.nf
Normal file
65
modules/picard/filtersamreads/main.nf
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
// Import generic module functions
|
||||||
|
include { initOptions; saveFiles; getSoftwareName } from './functions'
|
||||||
|
|
||||||
|
params.options = [:]
|
||||||
|
options = initOptions(params.options)
|
||||||
|
|
||||||
|
//
// Filter a BAM file with Picard FilterSamReads, either by alignment status
// (includeAligned/excludeAligned) or by an explicit read-ID list
// (includeReadList/excludeReadList, which additionally require 'readlist').
//
process PICARD_FILTERSAMREADS {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::picard=2.25.6" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/picard:2.25.6--hdfd78af_0"
    } else {
        container "quay.io/biocontainers/picard:2.25.6--hdfd78af_0"
    }

    input:
    tuple val(meta), path(bam)
    val filter
    path readlist

    output:
    tuple val(meta), path("*.bam"), emit: bam
    path "*.version.txt"          , emit: version

    script:
    def software = getSoftwareName(task.process)
    def prefix   = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"

    // Default the JVM heap to 3GB when no process memory limit is available.
    def avail_mem = 3
    if (!task.memory) {
        log.info '[Picard FilterSamReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
    } else {
        avail_mem = task.memory.giga
    }

    // Fail fast on an unsupported filter value; the original if/else chain
    // silently produced no script block, yielding a confusing Nextflow error.
    def valid_filters = ['includeAligned', 'excludeAligned', 'includeReadList', 'excludeReadList']
    assert valid_filters.contains(filter) : "PICARD_FILTERSAMREADS: invalid filter '${filter}'. Must be one of: ${valid_filters.join(', ')}"

    // --READ_LIST_FILE is only meaningful for the read-list based filters;
    // building it conditionally avoids duplicating the whole command.
    def readlist_arg = filter.endsWith('ReadList') ? "--READ_LIST_FILE $readlist" : ''
    """
    picard \\
        FilterSamReads \\
        -Xmx${avail_mem}g \\
        --INPUT $bam \\
        --OUTPUT ${prefix}.bam \\
        --FILTER $filter \\
        $readlist_arg \\
        $options.args

    echo \$(picard FilterSamReads --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d: > ${software}.version.txt
    """
}
|
51
modules/picard/filtersamreads/meta.yml
Normal file
51
modules/picard/filtersamreads/meta.yml
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
name: picard_filtersamreads
|
||||||
|
description: Filters SAM/BAM files to include/exclude either aligned/unaligned reads or based on a read list
|
||||||
|
keywords:
|
||||||
|
- bam
|
||||||
|
- filter
|
||||||
|
tools:
|
||||||
|
- picard:
|
||||||
|
description: |
|
||||||
|
A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS)
|
||||||
|
data and formats such as SAM/BAM/CRAM and VCF.
|
||||||
|
homepage: https://broadinstitute.github.io/picard/
|
||||||
|
documentation: https://broadinstitute.github.io/picard/
|
||||||
|
tool_dev_url: https://github.com/broadinstitute/picard
|
||||||
|
doi: ""
|
||||||
|
licence: ['MIT']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- bam:
|
||||||
|
type: file
|
||||||
|
description: List of BAM files. If filtering without read list must be sorted by queryname with picard sortsam
|
||||||
|
pattern: "*.{bam}"
|
||||||
|
- filter:
|
||||||
|
type: value
|
||||||
|
description: Picard filter type
|
||||||
|
pattern: "includeAligned|excludeAligned|includeReadList|excludeReadList"
|
||||||
|
- readlist:
|
||||||
|
type: file
|
||||||
|
description: Optional text file containing read IDs to include or exclude
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- bam:
|
||||||
|
type: file
|
||||||
|
description: Filtered BAM file
|
||||||
|
pattern: "*.{bam}"
|
||||||
|
- version:
|
||||||
|
type: file
|
||||||
|
description: File containing software version
|
||||||
|
pattern: "*.{version.txt}"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@jfy133"
|
|
@ -559,6 +559,10 @@ picard/collectwgsmetrics:
|
||||||
- modules/picard/collectwgsmetrics/**
|
- modules/picard/collectwgsmetrics/**
|
||||||
- tests/modules/picard/collectwgsmetrics/**
|
- tests/modules/picard/collectwgsmetrics/**
|
||||||
|
|
||||||
|
picard/filtersamreads:
|
||||||
|
- modules/picard/filtersamreads/**
|
||||||
|
- tests/modules/picard/filtersamreads/**
|
||||||
|
|
||||||
picard/markduplicates:
|
picard/markduplicates:
|
||||||
- modules/picard/markduplicates/**
|
- modules/picard/markduplicates/**
|
||||||
- tests/modules/picard/markduplicates/**
|
- tests/modules/picard/markduplicates/**
|
||||||
|
|
|
@ -74,6 +74,8 @@ params {
|
||||||
scaffolds_fasta = "${test_data_dir}/genomics/sarscov2/illumina/fasta/scaffolds.fasta"
|
scaffolds_fasta = "${test_data_dir}/genomics/sarscov2/illumina/fasta/scaffolds.fasta"
|
||||||
|
|
||||||
assembly_gfa = "${test_data_dir}/genomics/sarscov2/illumina/gfa/assembly.gfa"
|
assembly_gfa = "${test_data_dir}/genomics/sarscov2/illumina/gfa/assembly.gfa"
|
||||||
|
|
||||||
|
test_single_end_bam_readlist_txt = "${test_data_dir}/genomics/sarscov2/illumina/picard/test.single_end.bam.readlist.txt"
|
||||||
}
|
}
|
||||||
'nanopore' {
|
'nanopore' {
|
||||||
test_sorted_bam = "${test_data_dir}/genomics/sarscov2/nanopore/bam/test.sorted.bam"
|
test_sorted_bam = "${test_data_dir}/genomics/sarscov2/nanopore/bam/test.sorted.bam"
|
||||||
|
|
27
tests/modules/picard/filtersamreads/main.nf
Normal file
27
tests/modules/picard/filtersamreads/main.nf
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
#!/usr/bin/env nextflow
|
||||||
|
|
||||||
|
nextflow.enable.dsl = 2
|
||||||
|
|
||||||
|
include { PICARD_SORTSAM } from '../../../../modules/picard/sortsam/main.nf' addParams( options: [suffix:'.sorted'] )
|
||||||
|
include { PICARD_FILTERSAMREADS } from '../../../../modules/picard/filtersamreads/main.nf' addParams( options: [suffix:'.filtered'] )
|
||||||
|
|
||||||
|
// Exercise alignment-status filtering: FilterSamReads requires queryname-sorted
// input for includeAligned/excludeAligned, so sort first.
workflow test_picard_filtersamreads {

    input = [
        [ id:'test', single_end:false ], // meta map
        file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
    ]

    PICARD_SORTSAM ( input, 'queryname' )
    PICARD_FILTERSAMREADS ( PICARD_SORTSAM.out.bam, 'includeAligned', [] )
}
|
||||||
|
|
||||||
|
// Exercise read-list filtering: keep only the read IDs listed in the
// accompanying readlist text file.
workflow test_picard_filtersamreads_readlist {

    input = [
        [ id:'test', single_end:false ], // meta map
        file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true)
    ]
    readlist = file(params.test_data['sarscov2']['illumina']['test_single_end_bam_readlist_txt'], checkIfExists: true)

    PICARD_FILTERSAMREADS ( input, 'includeReadList', readlist )
}
|
18
tests/modules/picard/filtersamreads/test.yml
Normal file
18
tests/modules/picard/filtersamreads/test.yml
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
- name: picard filtersamreads
|
||||||
|
command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads -c tests/config/nextflow.config
|
||||||
|
tags:
|
||||||
|
- picard
|
||||||
|
- picard/filtersamreads
|
||||||
|
files:
|
||||||
|
- path: output/picard/test.filtered.bam
|
||||||
|
md5sum: b44a6ca04811a9470c7813c3c9465fd5
|
||||||
|
|
||||||
|
|
||||||
|
- name: picard filtersamreads_readlist
|
||||||
|
command: nextflow run ./tests/modules/picard/filtersamreads -entry test_picard_filtersamreads_readlist -c tests/config/nextflow.config
|
||||||
|
tags:
|
||||||
|
- picard
|
||||||
|
- picard/filtersamreads
|
||||||
|
files:
|
||||||
|
- path: output/picard/test.filtered.bam
|
||||||
|
md5sum: 1e86b738b56f2c2b09f4cab52baf05c7
|
Loading…
Reference in a new issue