Mirror of https://github.com/MillironX/nf-core_modules.git, synced 2024-12-22 02:58:17 +00:00
New module: Ultra (#871)
* 👌 IMPROVE: Update .gitignore
* 📦 Add ultra module
* 👌 IMPROVE: Update test input
* 👌 IMPROVE: Update and clean code
  - Update to last versions.yml file
  - Update meta.yml
  - Correct typos
* 👌 IMPROVE: Update output channels + rename following subtool
* 👌 IMPROVE: Remove old ultra files
* 👌 IMPROVE: Update pytest_modules.yml
* 👌 IMPROVE: Update test.yml
* 👌 IMPROVE: Keep md5sums as much as possible
* 👌 IMPROVE: Remove old ultra files
* 👌 IMPROVE: Update pytest_modules.yml
* 👌 IMPROVE: Update test.yml
* 👌 IMPROVE: Keep md5sums as much as possible
* 🐛 FIX: Add unsaved modifications
* 🐛 FIX: Remove one inconsistent md5sum
* 🐛 FIX: Grab software name using ${getSoftwareName(task.process)}
* 🐛 FIX: Remove md5sums for pickle files (not constant)
* Update modules/ultra/pipeline/main.nf
  Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com>
* Update modules/ultra/pipeline/main.nf
  Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com>
* 👌 IMPROVE: Update output directory, update meta.yml
* 👌 IMPROVE: Use modules to gunzip and sort the GTF
* 🐛 FIX: Set up channel correctly
* 👌 IMPROVE: Remove pickle files and databases (these might only be useful for debugging)
* Apply suggestions from code review
* Update main.nf
  Co-authored-by: Harshil Patel <drpatelh@users.noreply.github.com>
parent 0754d49db8
commit 4a9bfec61d
6 changed files with 217 additions and 0 deletions
78 modules/ultra/pipeline/functions.nf Normal file
@@ -0,0 +1,78 @@
//
// Utility functions used in nf-core DSL2 module files
//

//
// Extract name of software tool from process name using $task.process
//
def getSoftwareName(task_process) {
    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
}

//
// Extract name of module from process name using $task.process
//
def getProcessName(task_process) {
    return task_process.tokenize(':')[-1]
}

//
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
//
def initOptions(Map args) {
    def Map options = [:]
    options.args            = args.args ?: ''
    options.args2           = args.args2 ?: ''
    options.args3           = args.args3 ?: ''
    options.publish_by_meta = args.publish_by_meta ?: []
    options.publish_dir     = args.publish_dir ?: ''
    options.publish_files   = args.publish_files
    options.suffix          = args.suffix ?: ''
    return options
}

//
// Tidy up and join elements of a list to return a path string
//
def getPathFromList(path_list) {
    def paths = path_list.findAll { item -> !item?.trim().isEmpty() }  // Remove empty entries
    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
    return paths.join('/')
}

//
// Function to save/publish module results
//
def saveFiles(Map args) {
    def ioptions  = initOptions(args.options)
    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]

    // Do not publish versions.yml unless running from pytest workflow
    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
        return null
    }
    if (ioptions.publish_by_meta) {
        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
        for (key in key_list) {
            if (args.meta && key instanceof String) {
                def path = key
                if (args.meta.containsKey(key)) {
                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
                }
                path = path instanceof String ? path : ''
                path_list.add(path)
            }
        }
    }
    if (ioptions.publish_files instanceof Map) {
        for (ext in ioptions.publish_files) {
            if (args.filename.endsWith(ext.key)) {
                def ext_list = path_list.collect()
                ext_list.add(ext.value)
                return "${getPathFromList(ext_list)}/$args.filename"
            }
        }
    } else if (ioptions.publish_files == null) {
        return "${getPathFromList(path_list)}/$args.filename"
    }
}
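As a quick illustration (not part of this commit), here is how the helpers above resolve values; the process name, paths, and publish arguments below are hypothetical:

// Illustrative Groovy sketch only - assumes the functions above are in scope.
assert getSoftwareName('NFCORE_TEST:ULTRA_PIPELINE') == 'ultra'
assert getProcessName('NFCORE_TEST:ULTRA_PIPELINE')  == 'ULTRA_PIPELINE'
assert getPathFromList([ 'results/', '', ' ultra ', '/pipeline/' ]) == 'results/ultra/pipeline'
// With publish_by_meta:['id'] and meta:[id:'test'], saveFiles nests outputs under the sample id.
assert saveFiles(filename:'test.sam', options:[ publish_by_meta:['id'] ], publish_dir:'ultra', meta:[ id:'test' ]) == 'ultra/test/test.sam'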
48 modules/ultra/pipeline/main.nf Normal file
@@ -0,0 +1,48 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process ULTRA_PIPELINE {
    tag "$meta.id"
    label 'process_high'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::ultra_bioinformatics=0.0.4" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/ultra_bioinformatics:0.0.4--pyh5e36f6f_1"
    } else {
        container "quay.io/biocontainers/ultra_bioinformatics:0.0.4--pyh5e36f6f_1"
    }

    input:
    tuple val(meta), path(reads)
    path genome
    path gtf

    output:
    tuple val(meta), path("*.sam"), emit: sam
    path "versions.yml"           , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    """
    uLTRA \\
        pipeline \\
        --t $task.cpus \\
        --prefix $prefix \\
        $options.args \\
        \$(pwd)/$genome \\
        \$(pwd)/$gtf \\
        \$(pwd)/$reads \\
        ./

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( uLTRA --version|sed 's/uLTRA //g' )
    END_VERSIONS
    """
}
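For orientation, a minimal usage sketch (not part of this commit) of how a downstream DSL2 workflow might include the module and pass options through addParams; the include path, option values, and input files are illustrative assumptions, and the module also expects the usual nf-core params (e.g. params.outdir, params.publish_dir_mode, params.enable_conda) from the pipeline config:

nextflow.enable.dsl = 2

// Hypothetical include path and options - adjust to the consuming pipeline.
include { ULTRA_PIPELINE } from './modules/ultra/pipeline/main' addParams( options: [ args: '', suffix: '_ultra' ] )

workflow {
    // One sample: a meta map paired with its reads file (illustrative inputs).
    input  = Channel.of( [ [ id:'sample1', single_end:false ], file('sample1.fastq') ] )
    genome = file('genome.fasta')
    gtf    = file('annotation.gtf')

    ULTRA_PIPELINE ( input, genome, gtf )
}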
50 modules/ultra/pipeline/meta.yml Normal file
@@ -0,0 +1,50 @@
name: ultra_pipeline
description: uLTRA aligner - A wrapper around minimap2 to improve small exon detection
keywords:
  - uLTRA
  - minimap2
tools:
  - ultra:
      description: Splice aligner of long transcriptomic reads to genome.
      homepage: https://github.com/ksahlin/uLTRA
      documentation: https://github.com/ksahlin/uLTRA
      tool_dev_url: https://github.com/ksahlin/uLTRA
      doi: "10.1093/bioinformatics/btab540"
      licence: ['GNU GPLV3']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - genome:
      type: file
      description: FASTA file of the reference genome
      pattern: "*.fasta"
  - gtf:
      type: file
      description: Annotation of the genome in GTF format
      pattern: "*.gtf"
  - reads:
      type: file
      description: A FASTA or FASTQ file of reads to align
      pattern: "*.{fasta,fastq}"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - sam:
      type: file
      description: The aligned reads in SAM format
      pattern: "*.sam"
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"

authors:
  - "@sguizard"
4 tests/config/pytest_modules.yml
@@ -1311,6 +1311,10 @@ ucsc/wigtobigwig:
  - modules/ucsc/wigtobigwig/**
  - tests/modules/ucsc/wigtobigwig/**

ultra/pipeline:
  - modules/ultra/pipeline/**
  - tests/modules/ultra/pipeline/**

unicycler:
  - modules/unicycler/**
  - tests/modules/unicycler/**
25 tests/modules/ultra/pipeline/main.nf Normal file
@@ -0,0 +1,25 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { ULTRA_PIPELINE } from '../../../../modules/ultra/pipeline/main.nf' addParams( options: [:] )
include { GUNZIP         } from '../../../../modules/gunzip/main.nf'         addParams( options: [:] )
include { GFFREAD        } from '../../../../modules/gffread/main.nf'        addParams( options: [args: "--sort-alpha --keep-genes -T", suffix: "_sorted"] )

workflow test_ultra_pipeline {

    fastq  = file(params.test_data['homo_sapiens']['pacbio']['hifi']        , checkIfExists: true)
    gtf    = file(params.test_data['homo_sapiens']['genome']['genome_gtf']  , checkIfExists: true)
    genome = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

    GUNZIP ( fastq )
    GFFREAD ( gtf )

    GUNZIP
        .out
        .gunzip
        .map { [ [ id:'test', single_end:false ], it ] }
        .set { input }

    ULTRA_PIPELINE ( input, genome, GFFREAD.out.gtf )
}
12 tests/modules/ultra/pipeline/test.yml Normal file
@@ -0,0 +1,12 @@
- name: ultra pipeline test_ultra_pipeline
  command: nextflow run tests/modules/ultra/pipeline -entry test_ultra_pipeline -c tests/config/nextflow.config
  tags:
    - ultra/pipeline
    - ultra
  files:
    - path: output/gffread/genome_sorted.gtf
      md5sum: c0b034860c679a354cd093109ed90437
    - path: output/gunzip/test_hifi.fastq
      md5sum: 20e41c569d5828c1e87337e13a5185d3
    - path: output/ultra/test.sam
      md5sum: a37a1f9594a3099522dc1f6a903b2b12