Mirror of https://github.com/MillironX/nf-core_modules.git (synced 2024-11-13 13:23:09 +00:00)

Commit de862972ab: Merge remote-tracking branch 'nf-core/master'
88 changed files with 3404 additions and 45 deletions

modules/assemblyscan/functions.nf (new file, 78 lines)
@@ -0,0 +1,78 @@
//
// Utility functions used in nf-core DSL2 module files
//

//
// Extract name of software tool from process name using $task.process
//
def getSoftwareName(task_process) {
    return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
}

//
// Extract name of module from process name using $task.process
//
def getProcessName(task_process) {
    return task_process.tokenize(':')[-1]
}

//
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
//
def initOptions(Map args) {
    def Map options = [:]
    options.args            = args.args ?: ''
    options.args2           = args.args2 ?: ''
    options.args3           = args.args3 ?: ''
    options.publish_by_meta = args.publish_by_meta ?: []
    options.publish_dir     = args.publish_dir ?: ''
    options.publish_files   = args.publish_files
    options.suffix          = args.suffix ?: ''
    return options
}

//
// Tidy up and join elements of a list to return a path string
//
def getPathFromList(path_list) {
    def paths = path_list.findAll { item -> !item?.trim().isEmpty() }  // Remove empty entries
    paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
    return paths.join('/')
}

//
// Function to save/publish module results
//
def saveFiles(Map args) {
    def ioptions  = initOptions(args.options)
    def path_list = [ ioptions.publish_dir ?: args.publish_dir ]

    // Do not publish versions.yml unless running from pytest workflow
    if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
        return null
    }
    if (ioptions.publish_by_meta) {
        def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
        for (key in key_list) {
            if (args.meta && key instanceof String) {
                def path = key
                if (args.meta.containsKey(key)) {
                    path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
                }
                path = path instanceof String ? path : ''
                path_list.add(path)
            }
        }
    }
    if (ioptions.publish_files instanceof Map) {
        for (ext in ioptions.publish_files) {
            if (args.filename.endsWith(ext.key)) {
                def ext_list = path_list.collect()
                ext_list.add(ext.value)
                return "${getPathFromList(ext_list)}/$args.filename"
            }
        }
    } else if (ioptions.publish_files == null) {
        return "${getPathFromList(path_list)}/$args.filename"
    }
}

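A minimal usage sketch of these helpers (hypothetical module path and option values, not part of this commit): the map handed to a module via addParams is what initOptions() normalises, and saveFiles() combines publish_dir, publish_by_meta and the meta map into the published path.

// Illustrative sketch only; module path and option values are hypothetical.
include { ASSEMBLYSCAN } from './modules/assemblyscan/main' addParams(
    options: [ suffix: '.scan', publish_dir: 'assemblyscan', publish_by_meta: ['id'] ]
)

// With those options, a call such as
//   saveFiles(filename: 'sample1.scan.json', options: params.options,
//             publish_dir: 'assemblyscan', meta: [id: 'sample1'], publish_by_meta: ['id'])
// appends the sample id from the meta map to the publish directory and, because
// publish_files is null (publish everything), returns 'assemblyscan/sample1/sample1.scan.json'.
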
modules/assemblyscan/main.nf (new file, 38 lines)
@@ -0,0 +1,38 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process ASSEMBLYSCAN {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::assembly-scan=0.4.1" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/assembly-scan:0.4.1--pyhdfd78af_0"
    } else {
        container "quay.io/biocontainers/assembly-scan:0.4.1--pyhdfd78af_0"
    }

    input:
    tuple val(meta), path(assembly)

    output:
    tuple val(meta), path("*.json"), emit: json
    path "versions.yml"            , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    """
    assembly-scan $assembly > ${prefix}.json

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( assembly-scan --version 2>&1 | sed 's/^.*assembly-scan //; s/Using.*\$//' )
    END_VERSIONS
    """
}

modules/assemblyscan/meta.yml (new file, 43 lines)
@@ -0,0 +1,43 @@
name: assemblyscan
description: Assembly summary statistics in JSON format
keywords:
  - assembly
  - statistics
tools:
  - assemblyscan:
      description: Assembly summary statistics in JSON format
      homepage: https://github.com/rpetit3/assembly-scan
      documentation: https://github.com/rpetit3/assembly-scan
      tool_dev_url: https://github.com/rpetit3/assembly-scan
      doi: ""
      licence: ['MIT']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - assembly:
      type: file
      description: FASTA file for a given assembly
      pattern: "*.fasta"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - json:
      type: file
      description: Assembly statistics in JSON format
      pattern: "*.json"

authors:
  - "@sateeshperi"
  - "@mjcipriano"

modules/bedtools/genomecov/main.nf (modified)
@@ -19,7 +19,7 @@ process BEDTOOLS_GENOMECOV {
     }

     input:
-    tuple val(meta), path(intervals)
+    tuple val(meta), path(intervals), val(scale)
     path  sizes
     val   extension

@@ -28,13 +28,21 @@ process BEDTOOLS_GENOMECOV {
     path "versions.yml"           , emit: versions

     script:
     def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
+    def args_token = options.args.tokenize()
+    def args       = options.args
+    args += (scale > 0 && scale != 1) ? " -scale $scale" : ""
+
+    if (!args_token.contains('-bg') && (scale > 0 && scale != 1)) {
+        args += " -bg"
+    }
+
     if (intervals.name =~ /\.bam/) {
         """
         bedtools \\
             genomecov \\
             -ibam $intervals \\
-            $options.args \\
+            $args \\
             > ${prefix}.${extension}

         cat <<-END_VERSIONS > versions.yml
@@ -48,7 +56,7 @@ process BEDTOOLS_GENOMECOV {
             genomecov \\
             -i $intervals \\
             -g $sizes \\
-            $options.args \\
+            $args \\
             > ${prefix}.${extension}

         cat <<-END_VERSIONS > versions.yml

modules/bedtools/genomecov/meta.yml (modified)
@@ -20,6 +20,9 @@ input:
       type: file
       description: BAM/BED/GFF/VCF
       pattern: "*.{bam|bed|gff|vcf}"
+  - scale:
+      type: value
+      description: Number containing the scale factor for the output. Set to 1 to disable. Setting it to a value other than 1 also enables the -bg bedGraph output format, as that format is required for this option.
   - sizes:
       type: file
       description: Tab-delimited table of chromosome names in the first column and chromosome sizes in the second column
@@ -45,3 +48,4 @@ authors:
   - "@sruthipsuresh"
   - "@drpatelh"
   - "@sidorov-si"
+  - "@chris-cheshire"

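A hedged sketch of how a workflow might drive the new scale input (channel contents and file names below are hypothetical, not part of this commit): passing 1 leaves the coverage counts untouched, while any other positive value appends -scale and, if not already present, -bg to the bedtools call.

// Illustrative sketch only; file names and the scale value are hypothetical.
include { BEDTOOLS_GENOMECOV } from './modules/bedtools/genomecov/main' addParams( options: [:] )

workflow {
    // meta map, BAM file and per-sample scale factor travel together in one tuple
    ch_input = Channel.of( [ [ id:'test' ], file('test.bam'), 0.5 ] )
    BEDTOOLS_GENOMECOV ( ch_input, [], 'bedgraph' )   // sizes not needed for BAM input
}
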
modules/cellranger/Dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@
FROM continuumio/miniconda3:4.8.2
LABEL authors="Gisela Gabernet <gisela.gabernet@gmail.com>" \
      description="Docker image containing Cell Ranger"
# Disclaimer: this container is not provided nor supported by 10x Genomics.

# Install procps and clean apt cache
RUN apt-get update \
    && apt-get install -y procps \
    && apt-get clean -y && rm -rf /var/lib/apt/lists/*

# Copy pre-downloaded cellranger file
ENV CELLRANGER_VER 6.0.2
COPY cellranger-$CELLRANGER_VER.tar.gz /opt/cellranger-$CELLRANGER_VER.tar.gz

# Install cellranger
RUN \
    cd /opt && \
    tar -xzvf cellranger-$CELLRANGER_VER.tar.gz && \
    export PATH=/opt/cellranger-$CELLRANGER_VER:$PATH && \
    ln -s /opt/cellranger-$CELLRANGER_VER/cellranger /usr/bin/cellranger && \
    rm -rf /opt/cellranger-$CELLRANGER_VER.tar.gz

modules/cellranger/mkref/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/cellranger/mkref/main.nf (new file, 40 lines)
@@ -0,0 +1,40 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process CELLRANGER_MKREF {
    tag 'mkref'
    label 'process_high'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:[:], publish_by_meta:[]) }

    if (params.enable_conda) {
        exit 1, "Conda environments cannot be used when using the Cell Ranger tool. Please use docker or singularity containers."
    }
    container "nfcore/cellranger:6.0.2"

    input:
    path fasta
    path gtf
    val(reference_name)

    output:
    path "versions.yml"     , emit: versions
    path "${reference_name}", emit: reference

    script:
    """
    cellranger mkref \\
        --genome=${reference_name} \\
        --fasta=${fasta} \\
        --genes=${gtf}

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(echo \$( cellranger --version 2>&1) | sed 's/^.*[^0-9]\\([0-9]*\\.[0-9]*\\.[0-9]*\\).*\$/\\1/' )
    END_VERSIONS
    """
}

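A minimal sketch of calling this module from a workflow (hypothetical file names, not part of this commit); the third argument simply names the output reference folder.

// Illustrative sketch only; file names are hypothetical.
include { CELLRANGER_MKREF } from './modules/cellranger/mkref/main' addParams( options: [:] )

workflow {
    CELLRANGER_MKREF (
        file('genome.fa'),       // fasta
        file('genes.gtf'),       // gtf
        'cellranger_reference'   // reference_name, used as the output folder name
    )
}
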
modules/cellranger/mkref/meta.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
name: cellranger_mkref
description: Module to build the reference needed by the 10x Genomics Cell Ranger tool. Uses the cellranger mkref command.
keywords:
  - reference
  - mkref
  - index
tools:
  - cellranger:
      description: Cell Ranger by 10x Genomics is a set of analysis pipelines that process Chromium single-cell data to align reads, generate feature-barcode matrices, perform clustering and other secondary analysis, and more.
      homepage: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/what-is-cell-ranger
      documentation: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov
      tool_dev_url: https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/using/tutorial_ov
      doi: ""
      licence: 10x Genomics EULA

input:
  - fasta:
      type: file
      description: fasta genome file
      pattern: "*.{fasta,fa}"
  - gtf:
      type: file
      description: gtf transcriptome file
      pattern: "*.gtf"
  - reference_name:
      type: val
      description: name to give the reference folder
      pattern: str

output:
  - versions:
      type: file
      description: File containing software version
      pattern: "versions.yml"
  - reference:
      type: folder
      description: Folder containing all the reference indices needed by Cell Ranger

authors:
  - "@ggabernet"

modules/cellranger/readme.md (new file, 18 lines)
@@ -0,0 +1,18 @@
# Updating the docker container and making a new module release

Cell Ranger is a commercial tool by 10x Genomics. The container provided for the cellranger nf-core module is not provided nor supported by 10x Genomics. Updating the Cell Ranger version in the container and pushing the update to Dockerhub needs to be done manually.

1. Navigate to the [Cell Ranger download page](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest) and download the tarball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies.

2. Edit the Dockerfile: update the Cell Ranger version in this line:

    ```bash
    ENV CELLRANGER_VER <VERSION>
    ```

3. Create the container:

    ```bash
    docker build . -t nfcore/cellranger:<VERSION>
    docker push nfcore/cellranger:<VERSION>
    ```

modules/dedup/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/dedup/main.nf (new file, 47 lines)
@@ -0,0 +1,47 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process DEDUP {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::dedup=0.12.8" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/dedup:0.12.8--hdfd78af_1"
    } else {
        container "quay.io/biocontainers/dedup:0.12.8--hdfd78af_1"
    }

    input:
    tuple val(meta), path(bam)

    output:
    tuple val(meta), path("*_rmdup.bam"), emit: bam     // _rmdup is hardcoded output from dedup
    tuple val(meta), path("*.json")     , emit: json
    tuple val(meta), path("*.hist")     , emit: hist
    tuple val(meta), path("*log")       , emit: log
    path "versions.yml"                 , emit: versions

    script:
    prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"

    """
    dedup \\
        -Xmx${task.memory.toGiga()}g \\
        -i $bam \\
        -o . \\
        $options.args

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( echo \$(dedup --version 2>&1) | tail -n 1 | sed 's/.* v//')

    END_VERSIONS
    """
}

modules/dedup/meta.yml (new file, 60 lines)
@@ -0,0 +1,60 @@
name: dedup
description: DeDup is a tool for read deduplication in paired-end read merging (e.g. for ancient DNA experiments).
keywords:
  - dedup
  - deduplication
  - pcr duplicates
  - ancient DNA
  - paired-end
  - bam
tools:
  - dedup:
      description: DeDup is a tool for read deduplication in paired-end read merging (e.g. for ancient DNA experiments).
      homepage: https://github.com/apeltzer/DeDup
      documentation: https://dedup.readthedocs.io/en/latest/
      tool_dev_url: https://github.com/apeltzer/DeDup
      doi: "10.1186/s13059-016-0918-z"
      licence: ['GPL v3']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - bam:
      type: file
      description: BAM/SAM file
      pattern: "*.{bam,sam}"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - bam:
      type: file
      description: Deduplicated BAM file
      pattern: "*_rmdup.bam"
  - json:
      type: file
      description: JSON file for MultiQC
      pattern: "*.json"
  - hist:
      type: file
      description: Histogram data of amount of deduplication
      pattern: "*.hist"
  - log:
      type: file
      description: Dedup log information
      pattern: "*log"

authors:
  - "@jfy133"

modules/fgbio/fastqtobam/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/fgbio/fastqtobam/main.nf (new file, 51 lines)
@@ -0,0 +1,51 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process FGBIO_FASTQTOBAM {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::fgbio=1.4.0" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/fgbio:1.4.0--hdfd78af_0"
    } else {
        container "quay.io/biocontainers/fgbio:1.4.0--hdfd78af_0"
    }

    input:
    tuple val(meta), path(reads)
    val(read_structure)

    output:
    tuple val(meta), path("*_umi_converted.bam"), emit: umibam
    path "versions.yml"                         , emit: version

    script:
    def software = getSoftwareName(task.process)
    def prefix   = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"

    """
    mkdir tmpFolder

    fgbio \\
        --tmp-dir=${PWD}/tmpFolder \\
        FastqToBam \\
        -i $reads \\
        -o "${prefix}_umi_converted.bam" \\
        --read-structures $read_structure \\
        --sample $meta.id \\
        --library $meta.id \\
        $options.args

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( echo \$(fgbio --version 2>&1 | tr -d '[:cntrl:]' ) | sed -e 's/^.*Version: //;s/\\[.*\$//')
    END_VERSIONS
    """
}

modules/fgbio/fastqtobam/meta.yml (new file, 47 lines)
@@ -0,0 +1,47 @@
name: fgbio_fastqtobam
description: |
  Using the FGBIO tools, converts FASTQ files sequenced with UMIs into BAM files, moving the UMI barcode into the RX field of the BAM file
keywords:
  - fastqtobam
  - fgbio
tools:
  - fgbio:
      description: A set of tools for working with genomic and high throughput sequencing data, including UMIs
      homepage: http://fulcrumgenomics.github.io/fgbio/
      documentation: http://fulcrumgenomics.github.io/fgbio/tools/latest/
      tool_dev_url: https://github.com/fulcrumgenomics/fgbio
      doi: ""
      licence: ['MIT']

input:
  - reads:
      type: file
      description: pair of reads to be converted into BAM file
      pattern: "*.{fastq.gz}"
  - read_structure:
      type: string
      description: |
        A read structure should always be provided for each of the fastq files.
        If single end, the string will contain only one structure (i.e. "2M11S+T"); if paired-end, the string
        will contain two structures separated by a blank space (i.e. "2M11S+T 2M11S+T").
        If the read does not contain any UMI, the structure will be +T (i.e. only template of any length).
        https://github.com/fulcrumgenomics/fgbio/wiki/Read-Structures

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - version:
      type: file
      description: File containing software version
      pattern: "*.{version.yml}"
  - umibam:
      type: file
      description: Converted, unsorted BAM file with RX tag reporting UMI sequence (if any)
      pattern: "*.{bam}"

authors:
  - "@lescai"

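A hedged usage sketch tying the read_structure description above to the module call (sample and file names are hypothetical, not part of this commit); the read-structure string reuses the paired-end example given in the meta.yml.

// Illustrative sketch only; sample and file names are hypothetical.
include { FGBIO_FASTQTOBAM } from './modules/fgbio/fastqtobam/main' addParams( options: [:] )

workflow {
    ch_reads = Channel.of( [ [ id:'sample1' ], [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ] ] )
    // one read structure per FASTQ, space-separated (see meta.yml above)
    FGBIO_FASTQTOBAM ( ch_reads, '2M11S+T 2M11S+T' )
}
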
modules/filtlong/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/filtlong/main.nf (new file, 43 lines)
@@ -0,0 +1,43 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process FILTLONG {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::filtlong=0.2.1" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/filtlong:0.2.1--h9a82719_0"
    } else {
        container "quay.io/biocontainers/filtlong:0.2.1--h9a82719_0"
    }

    input:
    tuple val(meta), path(shortreads), path(longreads)

    output:
    tuple val(meta), path("${meta.id}_lr_filtlong.fastq.gz"), emit: reads
    path "versions.yml"                                     , emit: versions

    script:
    def prefix      = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def short_reads = meta.single_end ? "-1 $shortreads" : "-1 ${shortreads[0]} -2 ${shortreads[1]}"
    """
    filtlong \\
        $short_reads \\
        $options.args \\
        $longreads \\
        | gzip -n > ${prefix}_lr_filtlong.fastq.gz

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( filtlong --version | sed -e "s/Filtlong v//g" )
    END_VERSIONS
    """
}

modules/filtlong/meta.yml (new file, 50 lines)
@@ -0,0 +1,50 @@
name: filtlong
description: Filtlong filters long reads based on quality measures or short read data.
keywords:
  - nanopore
  - quality control
  - QC
  - filtering
  - long reads
  - short reads
tools:
  - filtlong:
      description: Filtlong is a tool for filtering long reads. It can take a set of long reads and produce a smaller, better subset. It uses both read length (longer is better) and read identity (higher is better) when choosing which reads pass the filter.
      homepage: https://anaconda.org/bioconda/filtlong
      documentation: None
      tool_dev_url: https://github.com/rrwick/Filtlong
      doi: ""
      licence: ['GPL v3']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - shortreads:
      type: file
      description: fastq file
      pattern: "*.{fq,fastq,fq.gz,fastq.gz}"
  - longreads:
      type: file
      description: fastq file
      pattern: "*.{fq,fastq,fq.gz,fastq.gz}"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - reads:
      type: file
      description: Filtered (compressed) fastq file
      pattern: "*.fastq.gz"

authors:
  - "@d4straub"

modules/gatk4/estimatelibrarycomplexity/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/gatk4/estimatelibrarycomplexity/main.nf (new file, 54 lines)
@@ -0,0 +1,54 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process GATK4_ESTIMATELIBRARYCOMPLEXITY {
    tag "$meta.id"
    label 'process_medium'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::gatk4=4.2.2.0" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/gatk4:4.2.2.0--hdfd78af_1"
    } else {
        container "quay.io/biocontainers/gatk4:4.2.2.0--hdfd78af_1"
    }

    input:
    tuple val(meta), path(cram)
    path(fasta)
    path(fai)
    path(dict)

    output:
    tuple val(meta), path('*.metrics'), emit: metrics
    path "versions.yml"               , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def crams  = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ")

    def avail_mem = 3
    if (!task.memory) {
        log.info '[GATK EstimateLibraryComplexity] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.'
    } else {
        avail_mem = task.memory.giga
    }
    """
    gatk EstimateLibraryComplexity \
        ${crams} \
        -O ${prefix}.metrics \
        --REFERENCE_SEQUENCE ${fasta} \
        --VALIDATION_STRINGENCY SILENT \
        --TMP_DIR . $options.args

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//')
    END_VERSIONS
    """
}

modules/gatk4/estimatelibrarycomplexity/meta.yml (new file, 56 lines)
@@ -0,0 +1,56 @@
name: gatk4_estimatelibrarycomplexity
description: Estimates the numbers of unique molecules in a sequencing library.
keywords:
  - gatk4
  - gatk4_estimatelibrarycomplexity
  - duplication_metrics
  - reporting
tools:
  - gatk4:
      description: Genome Analysis Toolkit (GATK4)
      homepage: https://gatk.broadinstitute.org/hc/en-us
      documentation: https://gatk.broadinstitute.org/hc/en-us
      tool_dev_url: https://github.com/broadinstitute/gatk
      doi: "10.1158/1538-7445.AM2017-3590"
      licence: ['Apache-2.0']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - cram:
      type: file
      description: BAM/CRAM/SAM file
      pattern: "*.{bam,cram,sam}"
  - fasta:
      type: file
      description: The reference fasta file
      pattern: "*.fasta"
  - fai:
      type: file
      description: Index of reference fasta file
      pattern: "fasta.fai"
  - dict:
      type: file
      description: GATK sequence dictionary
      pattern: "*.dict"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
  - metrics:
      type: file
      description: File containing metrics on the input files
      pattern: "*.{metrics}"

authors:
  - "@FriederikeHanssen"

modules/manta/germline/functions.nf (new file, 78 lines; content identical to modules/assemblyscan/functions.nf above)

modules/manta/germline/main.nf (new file, 68 lines)
@@ -0,0 +1,68 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process MANTA_GERMLINE {
    tag "$meta.id"
    label 'process_high'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::manta=1.6.0" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1"
    } else {
        container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1"
    }

    input:
    tuple val(meta), path(cram), path(crai)
    path fasta
    path fai
    path target_bed
    path target_bed_tbi

    output:
    tuple val(meta), path("*candidate_small_indels.vcf.gz")    , emit: candidate_small_indels_vcf
    tuple val(meta), path("*candidate_small_indels.vcf.gz.tbi"), emit: candidate_small_indels_vcf_tbi
    tuple val(meta), path("*candidate_sv.vcf.gz")              , emit: candidate_sv_vcf
    tuple val(meta), path("*candidate_sv.vcf.gz.tbi")          , emit: candidate_sv_vcf_tbi
    tuple val(meta), path("*diploid_sv.vcf.gz")                , emit: diploid_sv_vcf
    tuple val(meta), path("*diploid_sv.vcf.gz.tbi")            , emit: diploid_sv_vcf_tbi
    path "versions.yml"                                        , emit: versions

    script:
    def prefix        = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def options_manta = target_bed ? "--exome --callRegions $target_bed" : ""
    """
    configManta.py \
        --bam $cram \
        --reference $fasta \
        $options_manta \
        --runDir manta

    python manta/runWorkflow.py -m local -j $task.cpus

    mv manta/results/variants/candidateSmallIndels.vcf.gz \
        ${prefix}.candidate_small_indels.vcf.gz
    mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \
        ${prefix}.candidate_small_indels.vcf.gz.tbi
    mv manta/results/variants/candidateSV.vcf.gz \
        ${prefix}.candidate_sv.vcf.gz
    mv manta/results/variants/candidateSV.vcf.gz.tbi \
        ${prefix}.candidate_sv.vcf.gz.tbi
    mv manta/results/variants/diploidSV.vcf.gz \
        ${prefix}.diploid_sv.vcf.gz
    mv manta/results/variants/diploidSV.vcf.gz.tbi \
        ${prefix}.diploid_sv.vcf.gz.tbi

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( configManta.py --version )
    END_VERSIONS
    """
}

modules/manta/germline/meta.yml (new file, 87 lines)
@@ -0,0 +1,87 @@
name: manta_germline
description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs.
keywords:
  - somatic
  - wgs
  - wxs
  - panel
  - vcf
  - structural variants
  - small indels
tools:
  - manta:
      description: Structural variant and indel caller for mapped sequencing data
      homepage: https://github.com/Illumina/manta
      documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md
      tool_dev_url: https://github.com/Illumina/manta
      doi: "10.1093/bioinformatics/btv710"
      licence: ['GPL v3']

input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - cram:
      type: file
      description: BAM/CRAM/SAM file
      pattern: "*.{bam,cram,sam}"
  - crai:
      type: file
      description: BAM/CRAM/SAM index file
      pattern: "*.{bai,crai,sai}"
  - fasta:
      type: file
      description: Genome reference FASTA file
      pattern: "*.{fa,fasta}"
  - fai:
      type: file
      description: Genome reference FASTA index file
      pattern: "*.{fa.fai,fasta.fai}"
  - target_bed:
      type: file
      description: BED file containing target regions for variant calling
      pattern: "*.{bed}"
  - target_bed_tbi:
      type: file
      description: Index for BED file containing target regions for variant calling
      pattern: "*.{bed.tbi}"

output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - candidate_small_indels_vcf:
      type: file
      description: Gzipped VCF file containing variants
      pattern: "*.{vcf.gz}"
  - candidate_small_indels_vcf_tbi:
      type: file
      description: Index for gzipped VCF file containing variants
      pattern: "*.{vcf.gz.tbi}"
  - candidate_sv_vcf:
      type: file
      description: Gzipped VCF file containing variants
      pattern: "*.{vcf.gz}"
  - candidate_sv_vcf_tbi:
      type: file
      description: Index for gzipped VCF file containing variants
      pattern: "*.{vcf.gz.tbi}"
  - diploid_sv_vcf:
      type: file
      description: Gzipped VCF file containing variants
      pattern: "*.{vcf.gz}"
  - diploid_sv_vcf_tbi:
      type: file
      description: Index for gzipped VCF file containing variants
      pattern: "*.{vcf.gz.tbi}"
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"

authors:
  - "@maxulysse"

78
modules/manta/somatic/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
67
modules/manta/somatic/main.nf
Normal file
@@ -0,0 +1,67 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process MANTA_SOMATIC {
    tag "$meta.id"
    label 'process_high'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::manta=1.6.0" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1"
    } else {
        container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1"
    }

    input:
    tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor)
    path fasta
    path fai
    path target_bed
    path target_bed_tbi

    output:
    tuple val(meta), path("*.candidate_small_indels.vcf.gz")    , emit: candidate_small_indels_vcf
    tuple val(meta), path("*.candidate_small_indels.vcf.gz.tbi"), emit: candidate_small_indels_vcf_tbi
    tuple val(meta), path("*.candidate_sv.vcf.gz")              , emit: candidate_sv_vcf
    tuple val(meta), path("*.candidate_sv.vcf.gz.tbi")          , emit: candidate_sv_vcf_tbi
    tuple val(meta), path("*.diploid_sv.vcf.gz")                , emit: diploid_sv_vcf
    tuple val(meta), path("*.diploid_sv.vcf.gz.tbi")            , emit: diploid_sv_vcf_tbi
    tuple val(meta), path("*.somatic_sv.vcf.gz")                , emit: somatic_sv_vcf
    tuple val(meta), path("*.somatic_sv.vcf.gz.tbi")            , emit: somatic_sv_vcf_tbi
    path "versions.yml"                                         , emit: versions

    script:
    def prefix        = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def options_manta = target_bed ? "--exome --callRegions $target_bed" : ""

    """
    configManta.py \
        --tumorBam $cram_tumor \
        --normalBam $cram_normal \
        --reference $fasta \
        $options_manta \
        --runDir manta

    python manta/runWorkflow.py -m local -j $task.cpus

    mv manta/results/variants/candidateSmallIndels.vcf.gz ${prefix}.candidate_small_indels.vcf.gz
    mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi ${prefix}.candidate_small_indels.vcf.gz.tbi
    mv manta/results/variants/candidateSV.vcf.gz ${prefix}.candidate_sv.vcf.gz
    mv manta/results/variants/candidateSV.vcf.gz.tbi ${prefix}.candidate_sv.vcf.gz.tbi
    mv manta/results/variants/diploidSV.vcf.gz ${prefix}.diploid_sv.vcf.gz
    mv manta/results/variants/diploidSV.vcf.gz.tbi ${prefix}.diploid_sv.vcf.gz.tbi
    mv manta/results/variants/somaticSV.vcf.gz ${prefix}.somatic_sv.vcf.gz
    mv manta/results/variants/somaticSV.vcf.gz.tbi ${prefix}.somatic_sv.vcf.gz.tbi

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( configManta.py --version )
    END_VERSIONS
    """
}
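One way the five-element input tuple could be assembled from separate normal and tumour channels; this is a sketch under the assumption that both channels share identical meta maps per patient, and none of these channel names come from this commit:

// Hypothetical channel shaping for MANTA_SOMATIC (sketch only)
// ch_cram_normal: [ meta, cram, crai ]   ch_cram_tumor: [ meta, cram, crai ]
ch_manta_input = ch_cram_normal
    .join( ch_cram_tumor )    // keyed on the shared meta map (assumption)
    .map { meta, cram_n, crai_n, cram_t, crai_t -> [ meta, cram_n, crai_n, cram_t, crai_t ] }

MANTA_SOMATIC ( ch_manta_input, ch_fasta, ch_fasta_fai, ch_target_bed, ch_target_bed_tbi )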
|
103
modules/manta/somatic/meta.yml
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
name: manta_somatic
|
||||||
|
description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs.
|
||||||
|
keywords:
|
||||||
|
- somatic
|
||||||
|
- wgs
|
||||||
|
- wxs
|
||||||
|
- panel
|
||||||
|
- vcf
|
||||||
|
- structural variants
|
||||||
|
- small indels
|
||||||
|
tools:
|
||||||
|
- manta:
|
||||||
|
description: Structural variant and indel caller for mapped sequencing data
|
||||||
|
homepage: https://github.com/Illumina/manta
|
||||||
|
documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md
|
||||||
|
tool_dev_url: https://github.com/Illumina/manta
|
||||||
|
doi: "10.1093/bioinformatics/btv710"
|
||||||
|
licence: ['GPL v3']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- cram_normal:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM file
|
||||||
|
pattern: "*.{bam,cram,sam}"
|
||||||
|
- crai_normal:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM index file
|
||||||
|
pattern: "*.{bai,crai,sai}"
|
||||||
|
- cram_tumor:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM file
|
||||||
|
pattern: "*.{bam,cram,sam}"
|
||||||
|
- crai_tumor:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM index file
|
||||||
|
pattern: "*.{bai,crai,sai}"
|
||||||
|
- fasta:
|
||||||
|
type: file
|
||||||
|
description: Genome reference FASTA file
|
||||||
|
pattern: "*.{fa,fasta}"
|
||||||
|
- fai:
|
||||||
|
type: file
|
||||||
|
description: Genome reference FASTA index file
|
||||||
|
pattern: "*.{fa.fai,fasta.fai}"
|
||||||
|
- target_bed:
|
||||||
|
type: file
|
||||||
|
description: BED file containing target regions for variant calling
|
||||||
|
pattern: "*.{bed}"
|
||||||
|
- target_bed_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for BED file containing target regions for variant calling
|
||||||
|
pattern: "*.{bed.tbi}"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- candidate_small_indels_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- candidate_small_indels_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- candidate_sv_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- candidate_sv_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- diploid_sv_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- diploid_sv_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- somatic_sv_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- somatic_sv_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@FriederikeHanssen"
|
78
modules/manta/tumoronly/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
68
modules/manta/tumoronly/main.nf
Normal file
@@ -0,0 +1,68 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process MANTA_TUMORONLY {
    tag "$meta.id"
    label 'process_high'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::manta=1.6.0" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/manta:1.6.0--h9ee0642_1"
    } else {
        container "quay.io/biocontainers/manta:1.6.0--h9ee0642_1"
    }

    input:
    tuple val(meta), path(cram), path(crai)
    path fasta
    path fai
    path target_bed
    path target_bed_tbi

    output:
    tuple val(meta), path("*candidate_small_indels.vcf.gz")    , emit: candidate_small_indels_vcf
    tuple val(meta), path("*candidate_small_indels.vcf.gz.tbi"), emit: candidate_small_indels_vcf_tbi
    tuple val(meta), path("*candidate_sv.vcf.gz")              , emit: candidate_sv_vcf
    tuple val(meta), path("*candidate_sv.vcf.gz.tbi")          , emit: candidate_sv_vcf_tbi
    tuple val(meta), path("*tumor_sv.vcf.gz")                  , emit: tumor_sv_vcf
    tuple val(meta), path("*tumor_sv.vcf.gz.tbi")              , emit: tumor_sv_vcf_tbi
    path "versions.yml"                                        , emit: versions

    script:
    def prefix        = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def options_manta = target_bed ? "--exome --callRegions $target_bed" : ""
    """
    configManta.py \
        --tumorBam $cram \
        --reference $fasta \
        $options_manta \
        --runDir manta

    python manta/runWorkflow.py -m local -j $task.cpus

    mv manta/results/variants/candidateSmallIndels.vcf.gz \
        ${prefix}.candidate_small_indels.vcf.gz
    mv manta/results/variants/candidateSmallIndels.vcf.gz.tbi \
        ${prefix}.candidate_small_indels.vcf.gz.tbi
    mv manta/results/variants/candidateSV.vcf.gz \
        ${prefix}.candidate_sv.vcf.gz
    mv manta/results/variants/candidateSV.vcf.gz.tbi \
        ${prefix}.candidate_sv.vcf.gz.tbi
    mv manta/results/variants/tumorSV.vcf.gz \
        ${prefix}.tumor_sv.vcf.gz
    mv manta/results/variants/tumorSV.vcf.gz.tbi \
        ${prefix}.tumor_sv.vcf.gz.tbi

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( configManta.py --version )
    END_VERSIONS
    """
}
|
88
modules/manta/tumoronly/meta.yml
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
name: manta_tumoronly
|
||||||
|
description: Manta calls structural variants (SVs) and indels from mapped paired-end sequencing reads. It is optimized for analysis of germline variation in small sets of individuals and somatic variation in tumor/normal sample pairs.
|
||||||
|
keywords:
|
||||||
|
- somatic
|
||||||
|
- wgs
|
||||||
|
- wxs
|
||||||
|
- panel
|
||||||
|
- vcf
|
||||||
|
- structural variants
|
||||||
|
- small indels
|
||||||
|
tools:
|
||||||
|
- manta:
|
||||||
|
description: Structural variant and indel caller for mapped sequencing data
|
||||||
|
homepage: https://github.com/Illumina/manta
|
||||||
|
documentation: https://github.com/Illumina/manta/blob/v1.6.0/docs/userGuide/README.md
|
||||||
|
tool_dev_url: https://github.com/Illumina/manta
|
||||||
|
doi: "10.1093/bioinformatics/btv710"
|
||||||
|
licence: ['GPL v3']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- cram:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM file
|
||||||
|
pattern: "*.{bam,cram,sam}"
|
||||||
|
- crai:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM index file
|
||||||
|
pattern: "*.{bai,crai,sai}"
|
||||||
|
- fasta:
|
||||||
|
type: file
|
||||||
|
description: Genome reference FASTA file
|
||||||
|
pattern: "*.{fa,fasta}"
|
||||||
|
- fai:
|
||||||
|
type: file
|
||||||
|
description: Genome reference FASTA index file
|
||||||
|
pattern: "*.{fa.fai,fasta.fai}"
|
||||||
|
- target_bed:
|
||||||
|
type: file
|
||||||
|
description: BED file containing target regions for variant calling
|
||||||
|
pattern: "*.{bed}"
|
||||||
|
- target_bed_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for BED file containing target regions for variant calling
|
||||||
|
pattern: "*.{bed.tbi}"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
|
||||||
|
- candidate_small_indels_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- candidate_small_indels_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- candidate_sv_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- candidate_sv_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- tumor_sv_vcf:
|
||||||
|
type: file
|
||||||
|
description: Gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz}"
|
||||||
|
- tumor_sv_vcf_tbi:
|
||||||
|
type: file
|
||||||
|
description: Index for gzipped VCF file containing variants
|
||||||
|
pattern: "*.{vcf.gz.tbi}"
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@maxulysse"
|
78
modules/paraclu/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
45
modules/paraclu/main.nf
Normal file
@@ -0,0 +1,45 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process PARACLU {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::paraclu=10" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/paraclu%3A10--h9a82719_1"
    } else {
        container "quay.io/biocontainers/paraclu:10--h9a82719_1"
    }

    input:
    tuple val(meta), path(bed)
    val(min_cluster)

    output:
    tuple val(meta), path("*.bed"), emit: bed
    path "versions.yml"           , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    def VERSION=10
    """
    awk -F "\t" '{print\$1"\t"\$6"\t"\$2"\t"\$5}' < $bed > ${bed}_4P
    sort -k1,1 -k3n ${bed}_4P > ${bed}_4Ps
    paraclu $min_cluster ${bed}_4Ps > ${prefix}.clustered
    paraclu-cut ${prefix}.clustered > ${prefix}.clustered.simplified
    awk -F '\t' '{print \$1"\t"\$3"\t"\$4"\t"\$1":"\$3".."\$4","\$2"\t"\$6"\t"\$2}' ${prefix}.clustered.simplified > ${prefix}.clustered.simplified.bed

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: $VERSION
    END_VERSIONS
    """
}
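Because the second input is a plain value, a caller passes the minimum cluster size directly; a sketch only, with the channel name and the threshold of 30 chosen arbitrarily for illustration:

// Hypothetical invocation (sketch only, assumed names)
include { PARACLU } from './modules/paraclu/main' addParams( options: [:] )

workflow {
    PARACLU ( ch_bed, 30 )    // ch_bed: [ meta, bed ]; 30 is an illustrative minimum cluster value
}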
|
45
modules/paraclu/meta.yml
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
name: paraclu
|
||||||
|
description: Paraclu finds clusters in data attached to sequences.
|
||||||
|
keywords:
|
||||||
|
- sort
|
||||||
|
tools:
|
||||||
|
- paraclu:
|
||||||
|
description: Paraclu finds clusters in data attached to sequences.
|
||||||
|
homepage: https://gitlab.com/mcfrith/paraclu
|
||||||
|
documentation: https://gitlab.com/mcfrith/paraclu
|
||||||
|
tool_dev_url: https://gitlab.com/mcfrith/paraclu
|
||||||
|
doi: ""
|
||||||
|
licence: ['GPL v3-or-later']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- bed:
|
||||||
|
type: file
|
||||||
|
description: BED file
|
||||||
|
pattern: "*.bed"
|
||||||
|
- min_cluster:
|
||||||
|
type: integer
|
||||||
|
description: Minimum size of cluster
|
||||||
|
pattern: "*.bed"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
- bed:
|
||||||
|
type: file
|
||||||
|
description: clustered BED file
|
||||||
|
pattern: "*.bed"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@mashehu"
|
78
modules/porechop/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
42
modules/porechop/main.nf
Normal file
@@ -0,0 +1,42 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process PORECHOP {
    tag "$meta.id"
    label 'process_medium'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::porechop=0.2.4" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/porechop:0.2.4--py39h7cff6ad_2"
    } else {
        container "quay.io/biocontainers/porechop:0.2.4--py38h8c62d01_2"
    }

    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*.fastq.gz"), emit: reads
    path "versions.yml"                , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    """
    porechop \\
        -i ${reads} \\
        -t ${task.cpus} \\
        ${options.args} \\
        -o ${prefix}.fastq.gz

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$( porechop --version )
    END_VERSIONS
    """
}
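Since the command only forwards ${options.args}, any extra Porechop flags would be injected through addParams when the module is included; a sketch only, with the flag and suffix values picked for illustration rather than taken from this commit:

// Hypothetical include with custom args (sketch only, assumed example values)
include { PORECHOP } from './modules/porechop/main' addParams(
    options: [ args: '--discard_middle', suffix: '.porechop' ]
)

workflow {
    PORECHOP ( ch_nanopore_reads )    // ch_nanopore_reads: [ meta, fastq ] (hypothetical channel)
}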
|
50
modules/porechop/meta.yml
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
name: porechop
|
||||||
|
description: Adapter removal and demultiplexing of Oxford Nanopore reads
|
||||||
|
keywords:
|
||||||
|
- adapter
|
||||||
|
- nanopore
|
||||||
|
- demultiplexing
|
||||||
|
tools:
|
||||||
|
- porechop:
|
||||||
|
description: Adapter removal and demultiplexing of Oxford Nanopore reads
|
||||||
|
homepage: "https://github.com/rrwick/Porechop"
|
||||||
|
documentation: "https://github.com/rrwick/Porechop"
|
||||||
|
tool_dev_url: "https://github.com/rrwick/Porechop"
|
||||||
|
doi: "10.1099/mgen.0.000132"
|
||||||
|
licence: ["GPL v3"]
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- reads:
|
||||||
|
type: file
|
||||||
|
description: fastq/fastq.gz file
|
||||||
|
pattern: "*.{fastq,fastq.gz,fq,fq.gz}"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
- reads:
|
||||||
|
type: file
|
||||||
|
description: Demultiplexed and/or adapter-trimmed fastq.gz file
|
||||||
|
pattern: "*.{fastq.gz}"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@ggabernet"
|
||||||
|
- "@jasmezz"
|
||||||
|
- "@d4straub"
|
||||||
|
- "@LaurenceKuhl"
|
||||||
|
- "@SusiJo"
|
||||||
|
- "@jonasscheid"
|
||||||
|
- "@jonoave"
|
||||||
|
- "@GokceOGUZ"
|
78
modules/samtools/bam2fq/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
64
modules/samtools/bam2fq/main.nf
Normal file
@@ -0,0 +1,64 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process SAMTOOLS_BAM2FQ {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0"
    } else {
        container "quay.io/biocontainers/samtools:1.14--hb421002_0"
    }

    input:
    tuple val(meta), path(inputbam)
    val(split)

    output:
    tuple val(meta), path("*.fq.gz"), emit: reads
    path "versions.yml"             , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"

    if (split){
        """
        samtools \\
            bam2fq \\
            $options.args \\
            -@ $task.cpus \\
            -1 ${prefix}_1.fq.gz \\
            -2 ${prefix}_2.fq.gz \\
            -0 ${prefix}_other.fq.gz \\
            -s ${prefix}_singleton.fq.gz \\
            $inputbam

        cat <<-END_VERSIONS > versions.yml
        ${getProcessName(task.process)}:
            ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
        END_VERSIONS
        """
    } else {
        """
        samtools \\
            bam2fq \\
            $options.args \\
            -@ $task.cpus \\
            $inputbam >${prefix}_interleaved.fq.gz

        cat <<-END_VERSIONS > versions.yml
        ${getProcessName(task.process)}:
            ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
        END_VERSIONS
        """
    }

}
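The split value selects between the two script branches above; a sketch of a call, with the channel name assumed:

// Hypothetical invocation (sketch only)
SAMTOOLS_BAM2FQ ( ch_bam, true )    // split = true -> ${prefix}_1/_2/_other/_singleton.fq.gz
// with split = false the same process would instead emit a single ${prefix}_interleaved.fq.gz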
|
55
modules/samtools/bam2fq/meta.yml
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
name: samtools_bam2fq
|
||||||
|
description: |
|
||||||
|
The module uses the bam2fq method from samtools to
|
||||||
|
convert a SAM, BAM or CRAM file to FASTQ format
|
||||||
|
keywords:
|
||||||
|
- bam2fq
|
||||||
|
- samtools
|
||||||
|
- fastq
|
||||||
|
tools:
|
||||||
|
- samtools:
|
||||||
|
description: Tools for dealing with SAM, BAM and CRAM files
|
||||||
|
homepage: http://www.htslib.org
|
||||||
|
documentation: http://www.htslib.org/doc/1.1/samtools.html
|
||||||
|
tool_dev_url: https://github.com/samtools/samtools
|
||||||
|
doi: ""
|
||||||
|
licence: ['MIT']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- inputbam:
|
||||||
|
type: file
|
||||||
|
description: BAM/CRAM/SAM file
|
||||||
|
pattern: "*.{bam,cram,sam}"
|
||||||
|
- split:
|
||||||
|
type: boolean
|
||||||
|
description: |
|
||||||
|
TRUE/FALSE value to indicate if reads should be separated into
|
||||||
|
/1, /2 and if present other, or singleton.
|
||||||
|
Note: choosing TRUE will generate 4 different files.
|
||||||
|
Choosing FALSE will produce a single file, which will be interleaved in case
|
||||||
|
the input contains paired reads.
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
- reads:
|
||||||
|
type: file
|
||||||
|
description: |
|
||||||
|
FASTQ files, which will be either a group of 4 files (read_1, read_2, other and singleton)
|
||||||
|
or a single interleaved .fq.gz file if the user chooses not to split the reads.
|
||||||
|
pattern: "*.fq.gz"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@lescai"
|
78
modules/samtools/depth/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
43
modules/samtools/depth/main.nf
Normal file
@@ -0,0 +1,43 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process SAMTOOLS_DEPTH {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::samtools=1.14" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0"
    } else {
        container "quay.io/biocontainers/samtools:1.14--hb421002_0"
    }

    input:
    tuple val(meta), path(bam)

    output:
    tuple val(meta), path("*.tsv"), emit: tsv
    path "versions.yml"           , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"

    """
    samtools \\
        depth \\
        $options.args \\
        -o ${prefix}.tsv \\
        $bam

    cat <<-END_VERSIONS > versions.yml
    ${getProcessName(task.process)}:
        ${getSoftwareName(task.process)}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//')
    END_VERSIONS
    """
}
|
44
modules/samtools/depth/meta.yml
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
name: samtools_depth
|
||||||
|
description: Computes the depth at each position or region.
|
||||||
|
keywords:
|
||||||
|
- depth
|
||||||
|
- samtools
|
||||||
|
- statistics
|
||||||
|
- coverage
|
||||||
|
tools:
|
||||||
|
- samtools:
|
||||||
|
description: Tools for dealing with SAM, BAM and CRAM files; samtools depth – computes the read depth at each position or region
|
||||||
|
homepage: http://www.htslib.org
|
||||||
|
documentation: http://www.htslib.org/doc/samtools-depth.html
|
||||||
|
tool_dev_url: https://github.com/samtools/samtools
|
||||||
|
doi: "10.1093/bioinformatics/btp352"
|
||||||
|
licence: ['MIT']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- bam:
|
||||||
|
type: file
|
||||||
|
description: sorted BAM/CRAM/SAM file
|
||||||
|
pattern: "*.{bam,cram,sam}"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
- tsv:
|
||||||
|
type: file
|
||||||
|
description: The output of samtools depth has three columns - the name of the contig or chromosome, the position and the number of reads aligned at that position
|
||||||
|
pattern: "*.{tsv}"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@louperelo"
|
78
modules/seqtk/mergepe/functions.nf
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//
|
||||||
|
// Utility functions used in nf-core DSL2 module files
|
||||||
|
//
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of software tool from process name using $task.process
|
||||||
|
//
|
||||||
|
def getSoftwareName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Extract name of module from process name using $task.process
|
||||||
|
//
|
||||||
|
def getProcessName(task_process) {
|
||||||
|
return task_process.tokenize(':')[-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to initialise default values and to generate a Groovy Map of available options for nf-core modules
|
||||||
|
//
|
||||||
|
def initOptions(Map args) {
|
||||||
|
def Map options = [:]
|
||||||
|
options.args = args.args ?: ''
|
||||||
|
options.args2 = args.args2 ?: ''
|
||||||
|
options.args3 = args.args3 ?: ''
|
||||||
|
options.publish_by_meta = args.publish_by_meta ?: []
|
||||||
|
options.publish_dir = args.publish_dir ?: ''
|
||||||
|
options.publish_files = args.publish_files
|
||||||
|
options.suffix = args.suffix ?: ''
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Tidy up and join elements of a list to return a path string
|
||||||
|
//
|
||||||
|
def getPathFromList(path_list) {
|
||||||
|
def paths = path_list.findAll { item -> !item?.trim().isEmpty() } // Remove empty entries
|
||||||
|
paths = paths.collect { it.trim().replaceAll("^[/]+|[/]+\$", "") } // Trim whitespace and trailing slashes
|
||||||
|
return paths.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// Function to save/publish module results
|
||||||
|
//
|
||||||
|
def saveFiles(Map args) {
|
||||||
|
def ioptions = initOptions(args.options)
|
||||||
|
def path_list = [ ioptions.publish_dir ?: args.publish_dir ]
|
||||||
|
|
||||||
|
// Do not publish versions.yml unless running from pytest workflow
|
||||||
|
if (args.filename.equals('versions.yml') && !System.getenv("NF_CORE_MODULES_TEST")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
if (ioptions.publish_by_meta) {
|
||||||
|
def key_list = ioptions.publish_by_meta instanceof List ? ioptions.publish_by_meta : args.publish_by_meta
|
||||||
|
for (key in key_list) {
|
||||||
|
if (args.meta && key instanceof String) {
|
||||||
|
def path = key
|
||||||
|
if (args.meta.containsKey(key)) {
|
||||||
|
path = args.meta[key] instanceof Boolean ? "${key}_${args.meta[key]}".toString() : args.meta[key]
|
||||||
|
}
|
||||||
|
path = path instanceof String ? path : ''
|
||||||
|
path_list.add(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (ioptions.publish_files instanceof Map) {
|
||||||
|
for (ext in ioptions.publish_files) {
|
||||||
|
if (args.filename.endsWith(ext.key)) {
|
||||||
|
def ext_list = path_list.collect()
|
||||||
|
ext_list.add(ext.value)
|
||||||
|
return "${getPathFromList(ext_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (ioptions.publish_files == null) {
|
||||||
|
return "${getPathFromList(path_list)}/$args.filename"
|
||||||
|
}
|
||||||
|
}
|
53
modules/seqtk/mergepe/main.nf
Normal file
@@ -0,0 +1,53 @@
// Import generic module functions
include { initOptions; saveFiles; getSoftwareName; getProcessName } from './functions'

params.options = [:]
options        = initOptions(params.options)

process SEQTK_MERGEPE {
    tag "$meta.id"
    label 'process_low'
    publishDir "${params.outdir}",
        mode: params.publish_dir_mode,
        saveAs: { filename -> saveFiles(filename:filename, options:params.options, publish_dir:getSoftwareName(task.process), meta:meta, publish_by_meta:['id']) }

    conda (params.enable_conda ? "bioconda::seqtk=1.3" : null)
    if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) {
        container "https://depot.galaxyproject.org/singularity/seqtk:1.3--h5bf99c6_3"
    } else {
        container "quay.io/biocontainers/seqtk:1.3--h5bf99c6_3"
    }

    input:
    tuple val(meta), path(reads)

    output:
    tuple val(meta), path("*.fastq.gz"), emit: reads
    path "versions.yml"                , emit: versions

    script:
    def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
    if (meta.single_end) {
        """
        ln -s ${reads} ${prefix}.fastq.gz

        cat <<-END_VERSIONS > versions.yml
        ${getProcessName(task.process)}:
            ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//')
        END_VERSIONS
        """
    } else {
        """
        seqtk \\
            mergepe \\
            $options.args \\
            ${reads} \\
            | gzip -n >> ${prefix}.fastq.gz

        cat <<-END_VERSIONS > versions.yml
        ${getProcessName(task.process)}:
            ${getSoftwareName(task.process)}: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//')
        END_VERSIONS
        """
    }
}
|
40
modules/seqtk/mergepe/meta.yml
Normal file
40
modules/seqtk/mergepe/meta.yml
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
name: seqtk_mergepe
|
||||||
|
description: Interleave paired-end reads from FastQ files
|
||||||
|
keywords:
|
||||||
|
- interleave
|
||||||
|
tools:
|
||||||
|
- seqtk:
|
||||||
|
description: Seqtk is a fast and lightweight tool for processing sequences in the FASTA or FASTQ format. The seqtk mergepe command merges paired-end reads into one interleaved file.
|
||||||
|
homepage: https://github.com/lh3/seqtk
|
||||||
|
documentation: https://docs.csc.fi/apps/seqtk/
|
||||||
|
tool_dev_url: https://github.com/lh3/seqtk
|
||||||
|
licence: ['MIT']
|
||||||
|
|
||||||
|
input:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- reads:
|
||||||
|
type: file
|
||||||
|
description: List of input FastQ files of size 1 and 2 for single-end and paired-end data, respectively.
|
||||||
|
pattern: "*.{fastq.gz}"
|
||||||
|
|
||||||
|
output:
|
||||||
|
- meta:
|
||||||
|
type: map
|
||||||
|
description: |
|
||||||
|
Groovy Map containing sample information
|
||||||
|
e.g. [ id:'test', single_end:false ]
|
||||||
|
- versions:
|
||||||
|
type: file
|
||||||
|
description: File containing software versions
|
||||||
|
pattern: "versions.yml"
|
||||||
|
- reads:
|
||||||
|
type: file
|
||||||
|
description: If single-end reads, the output is the same as the input, 1 FastQ file for each read. If paired-end reads, the read pairs will be interleaved and output as 1 FastQ file for each read pair.
|
||||||
|
pattern: "*.{fastq.gz}"
|
||||||
|
|
||||||
|
authors:
|
||||||
|
- "@emnilsson"
|
|
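For orientation, the new module can be wired into a DSL2 pipeline roughly as in the sketch below. This is a minimal, hypothetical example and not part of this commit: the include path, sample id, suffix value and FastQ file names are placeholders, while the include/addParams pattern and the input shape mirror the meta.yml above and the test workflows further down.

#!/usr/bin/env nextflow
nextflow.enable.dsl = 2

// Hypothetical include path and options; adjust to the pipeline's layout.
include { SEQTK_MERGEPE } from './modules/seqtk/mergepe/main' addParams( options: [ suffix: '.interleaved' ] )

workflow {
    // meta map plus a list with R1 and R2, as described in meta.yml;
    // with single_end:true a single FastQ would simply be passed through.
    reads = Channel.of(
        [ [ id:'sample1', single_end:false ],
          [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ] ]
    )

    SEQTK_MERGEPE ( reads )
    SEQTK_MERGEPE.out.reads.view()
}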
@@ -23,17 +23,19 @@ process STRELKA_GERMLINE {
     path fasta
     path fai
     path target_bed
+    path target_bed_tbi

     output:
     tuple val(meta), path("*variants.vcf.gz")    , emit: vcf
     tuple val(meta), path("*variants.vcf.gz.tbi"), emit: vcf_tbi
     tuple val(meta), path("*genome.vcf.gz")      , emit: genome_vcf
     tuple val(meta), path("*genome.vcf.gz.tbi")  , emit: genome_vcf_tbi
     path "versions.yml"                          , emit: versions

     script:
     def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
-    def regions = params.target_bed ? "--exome --callRegions ${target_bed}" : ""
+    def regions = target_bed ? "--exome --callRegions ${target_bed}" : ""
     """
     configureStrelkaGermlineWorkflow.py \\
         --bam $bam \\

@@ -19,7 +19,7 @@ process STRELKA_SOMATIC {
     }

     input:
-    tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor)
+    tuple val(meta), path(cram_normal), path(crai_normal), path(cram_tumor), path(crai_tumor), path(manta_candidate_small_indels), path(manta_candidate_small_indels_tbi)
     path fasta
     path fai
     path target_bed

@@ -34,13 +34,15 @@ process STRELKA_SOMATIC {

     script:
     def prefix = options.suffix ? "${meta.id}${options.suffix}" : "${meta.id}"
-    def options_strelka = params.target_bed ? "--exome --callRegions ${target_bed}" : ""
+    def options_target_bed = target_bed ? "--exome --callRegions ${target_bed}" : ""
+    def options_manta = manta_candidate_small_indels ? "--indelCandidates ${manta_candidate_small_indels}" : ""
     """
     configureStrelkaSomaticWorkflow.py \\
         --tumor $cram_tumor \\
         --normal $cram_normal \\
         --referenceFasta $fasta \\
-        $options_strelka \\
+        $options_target_bed \\
+        $options_manta \\
         $options.args \\
         --runDir strelka

@@ -37,6 +37,14 @@ input:
       type: file
       description: BAM/CRAM/SAM index file
      pattern: "*.{bai,crai,sai}"
+  - manta_candidate_small_indels:
+      type: file
+      description: VCF.gz file
+      pattern: "*.{vcf.gz}"
+  - manta_candidate_small_indels_tbi:
+      type: file
+      description: VCF.gz index file
+      pattern: "*.tbi"
   - fasta:
       type: file
       description: Genome reference FASTA file
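With the widened input tuple, callers decide per sample whether Manta candidate small indels are handed to Strelka; the updated test workflow further down simply pads the tuple with "[],[]" when none are available. The following sketch is a hypothetical illustration only (not part of this commit) and all file names in it are placeholders.

#!/usr/bin/env nextflow
nextflow.enable.dsl = 2

workflow {
    meta = [ id:'sample1' ]

    // Without Manta candidate small indels: pad the tuple with empty lists,
    // as the updated test workflow does with "[],[]".
    no_manta = [ meta,
                 file('normal.cram'), file('normal.cram.crai'),
                 file('tumor.cram'),  file('tumor.cram.crai'),
                 [], [] ]

    // With Manta candidate small indels: supply the VCF and its index so the
    // module adds "--indelCandidates" to configureStrelkaSomaticWorkflow.py.
    with_manta = [ meta,
                   file('normal.cram'), file('normal.cram.crai'),
                   file('tumor.cram'),  file('tumor.cram.crai'),
                   file('candidate_small_indels.vcf.gz'),
                   file('candidate_small_indels.vcf.gz.tbi') ]

    Channel.of( no_manta, with_manta ).view()
}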
@@ -30,6 +30,10 @@ artic/minion:
   - modules/artic/minion/**
   - tests/modules/artic/minion/**

+assemblyscan:
+  - modules/assemblyscan/**
+  - tests/modules/assemblyscan/**
+
 bamaligncleaner:
   - modules/bamaligncleaner/**
   - tests/modules/bamaligncleaner/**

@@ -254,6 +258,10 @@ cat/fastq:
   - modules/cat/fastq/**
   - tests/modules/cat/fastq/**

+cellranger/mkref:
+  - modules/cellranger/mkref/**
+  - tests/modules/cellranger/mkref/**
+
 checkm/lineagewf:
   - modules/checkm/lineagewf/**
   - tests/modules/checkm/lineagewf/**

@@ -294,6 +302,10 @@ damageprofiler:
   - modules/damageprofiler/**
   - tests/modules/damageprofiler/**

+dedup:
+  - modules/dedup/**
+  - tests/modules/dedup/**
+
 deeptools/computematrix:
   - modules/deeptools/computematrix/**
   - tests/modules/deeptools/computematrix/**

@@ -378,10 +390,18 @@ fgbio/callmolecularconsensusreads:
   - modules/fgbio/callmolecularconsensusreads/**
   - tests/modules/fgbio/callmolecularconsensusreads/**

+fgbio/fastqtobam:
+  - modules/fgbio/fastqtobam/**
+  - tests/modules/fgbio/fastqtobam/**
+
 fgbio/sortbam:
   - modules/fgbio/sortbam/**
   - tests/modules/fgbio/sortbam/**

+filtlong:
+  - modules/filtlong/**
+  - tests/modules/filtlong/**
+
 flash:
   - modules/flash/**
   - tests/modules/flash/**

@@ -414,6 +434,10 @@ gatk4/createsomaticpanelofnormals:
   - modules/gatk4/createsomaticpanelofnormals/**
   - tests/modules/gatk4/createsomaticpanelofnormals/**

+gatk4/estimatelibrarycomplexity:
+  - modules/gatk4/estimatelibrarycomplexity/**
+  - tests/modules/gatk4/estimatelibrarycomplexity/**
+
 gatk4/fastqtosam:
   - modules/gatk4/fastqtosam/**
   - tests/modules/gatk4/fastqtosam/**

@@ -678,6 +702,18 @@ maltextract:
   - modules/maltextract/**
   - tests/modules/maltextract/**

+manta/germline:
+  - modules/manta/germline/**
+  - tests/modules/manta/germline/**
+
+manta/somatic:
+  - modules/manta/somatic/**
+  - tests/modules/manta/somatic/**
+
+manta/tumoronly:
+  - modules/manta/tumoronly/**
+  - tests/modules/manta/tumoronly/**
+
 mash/sketch:
   - modules/mash/sketch/**
   - tests/modules/mash/sketch/**

@@ -791,6 +827,10 @@ pangolin:
   - modules/pangolin/**
   - tests/modules/pangolin/**

+paraclu:
+  - modules/paraclu/**
+  - tests/modules/paraclu/**
+
 pbbam/pbmerge:
   - modules/pbbam/pbmerge/**
   - tests/modules/pbbam/pbmerge/**

@@ -835,6 +875,10 @@ plink/vcf:
   - modules/plink/vcf/**
   - tests/modules/plink/vcf/**

+porechop:
+  - modules/porechop/**
+  - tests/modules/porechop/**
+
 preseq/lcextrap:
   - modules/preseq/lcextrap/**
   - tests/modules/preseq/lcextrap/**

@@ -939,6 +983,14 @@ samtools/ampliconclip:
   - modules/samtools/ampliconclip/**
   - tests/modules/samtools/ampliconclip/**

+samtools/bam2fq:
+  - modules/samtools/bam2fq/**
+  - tests/modules/samtools/bam2fq/**
+
+samtools/depth:
+  - modules/samtools/depth/**
+  - tests/modules/samtools/depth/**
+
 samtools/faidx:
   - modules/samtools/faidx/**
   - tests/modules/samtools/faidx/**

@@ -987,6 +1039,10 @@ seqkit/split2:
   - modules/seqkit/split2/**
   - tests/modules/seqkit/split2/**

+seqtk/mergepe:
+  - modules/seqtk/mergepe/**
+  - tests/modules/seqtk/mergepe/**
+
 seqtk/sample:
   - modules/seqtk/sample/**
   - tests/modules/seqtk/sample/**

@@ -135,6 +135,7 @@ params {
         test_paired_end_umi_histogram_txt = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_histogram.txt"
         test_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_unsorted.bam"
         test_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.unsorted_tagged.bam"
+        test_paired_end_hla = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/example_hla_pe.bam"

         test2_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam"
         test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam.bai"

13
tests/modules/assemblyscan/main.nf
Normal file

@@ -0,0 +1,13 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { ASSEMBLYSCAN } from '../../../modules/assemblyscan/main.nf' addParams( options: [:] )

workflow test_assemblyscan {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ]

    ASSEMBLYSCAN ( input )
}

7
tests/modules/assemblyscan/test.yml
Normal file

@@ -0,0 +1,7 @@
- name: assemblyscan test_assemblyscan
  command: nextflow run tests/modules/assemblyscan -entry test_assemblyscan -c tests/config/nextflow.config
  tags:
    - assemblyscan
  files:
    - path: output/assemblyscan/test.json
      md5sum: 9140e3d43f2d676f62e1325ace5dd8bd

@@ -4,10 +4,37 @@ nextflow.enable.dsl = 2

 include { BEDTOOLS_GENOMECOV } from '../../../../modules/bedtools/genomecov/main.nf' addParams( options: [suffix: '_out'] )

-workflow test_bedtools_genomecov {
+workflow test_bedtools_genomecov_noscale {
     input = [
         [ id:'test'],
-        file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
+        file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true),
+        1
+    ]
+
+    sizes = []
+    extension = 'txt'
+
+    BEDTOOLS_GENOMECOV ( input, sizes, extension )
+}
+
+workflow test_bedtools_genomecov_nonbam_noscale {
+    input = [
+        [ id:'test'],
+        file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true),
+        1
+    ]
+
+    sizes = file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true)
+    extension = 'txt'
+
+    BEDTOOLS_GENOMECOV ( input, sizes, extension )
+}
+
+workflow test_bedtools_genomecov_scale {
+    input = [
+        [ id:'test'],
+        file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true),
+        0.5
     ]

     sizes = file('dummy_chromosome_sizes')

@@ -16,10 +43,11 @@ workflow test_bedtools_genomecov {
     BEDTOOLS_GENOMECOV ( input, sizes, extension )
 }

-workflow test_bedtools_genomecov_nonbam {
+workflow test_bedtools_genomecov_nonbam_scale {
     input = [
         [ id:'test'],
-        file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true)
+        file(params.test_data['sarscov2']['genome']['baits_bed'], checkIfExists: true),
+        0.5
     ]

     sizes = file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true)

@@ -1,5 +1,5 @@
-- name: bedtools genomecov test_bedtools_genomecov
-  command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov -c tests/config/nextflow.config
+- name: bedtools genomecov test_bedtools_genomecov_noscale
+  command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_noscale -c tests/config/nextflow.config
   tags:
     - bedtools
     - bedtools/genomecov

@@ -7,11 +7,29 @@
     - path: output/bedtools/test_out.txt
       md5sum: 66083198daca6c001d328ba9616e9b53

-- name: bedtools genomecov test_bedtools_genomecov_nonbam
-  command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam -c tests/config/nextflow.config
+- name: bedtools genomecov test_bedtools_genomecov_nonbam_noscale
+  command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_noscale -c tests/config/nextflow.config
   tags:
     - bedtools
     - bedtools/genomecov
   files:
     - path: output/bedtools/test_out.txt
       md5sum: f47b58840087426e5b643d8dfd155c1f
+
+- name: bedtools genomecov test_bedtools_genomecov_scale
+  command: nextflow run ./tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_scale -c tests/config/nextflow.config
+  tags:
+    - bedtools
+    - bedtools/genomecov
+  files:
+    - path: output/bedtools/test_out.txt
+      md5sum: 01291b6e1beab72e046653e709eb0e10
+
+- name: bedtools genomecov test_bedtools_genomecov_nonbam_scale
+  command: nextflow run tests/modules/bedtools/genomecov -entry test_bedtools_genomecov_nonbam_scale -c tests/config/nextflow.config
+  tags:
+    - bedtools
+    - bedtools/genomecov
+  files:
+    - path: output/bedtools/test_out.txt
+      md5sum: de3c59c0ea123bcdbbad27bc0a0a601e

16
tests/modules/cellranger/mkref/main.nf
Normal file

@@ -0,0 +1,16 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { CELLRANGER_MKREF } from '../../../../modules/cellranger/mkref/main.nf' addParams( options: [:] )

workflow test_cellranger_mkref {

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true)
    reference_name = "homo_sapiens_chr22_reference"

    CELLRANGER_MKREF ( fasta,
                       gtf,
                       reference_name )
}

43
tests/modules/cellranger/mkref/test.yml
Normal file

@@ -0,0 +1,43 @@
- name: cellranger mkref test_cellranger_mkref
  command: nextflow run tests/modules/cellranger/mkref -entry test_cellranger_mkref -c tests/config/nextflow.config
  tags:
    - cellranger
    - cellranger/mkref
  files:
    - path: output/cellranger/homo_sapiens_chr22_reference/fasta/genome.fa
      md5sum: f315020d899597c1b57e5fe9f60f4c3e
    - path: output/cellranger/homo_sapiens_chr22_reference/fasta/genome.fa.fai
      md5sum: 3520cd30e1b100e55f578db9c855f685
    - path: output/cellranger/homo_sapiens_chr22_reference/genes/genes.gtf.gz
      md5sum: 6d9b5f409bfea95022bc25b9590e194e
    - path: output/cellranger/homo_sapiens_chr22_reference/reference.json
      md5sum: a4e2b9bbf016c55b0d4d7bc1fa53896f
    - path: output/cellranger/homo_sapiens_chr22_reference/star/Genome
      md5sum: 22102926fadf5890e905ca71b2da3f35
    - path: output/cellranger/homo_sapiens_chr22_reference/star/SA
      md5sum: bcf3e1a855783105150b46c905465333
    - path: output/cellranger/homo_sapiens_chr22_reference/star/SAindex
      md5sum: b93fb07d342e6c32a00ebc4311c0ad38
    - path: output/cellranger/homo_sapiens_chr22_reference/star/chrLength.txt
      md5sum: c81f40f27e72606d7d07097c1d56a5b5
    - path: output/cellranger/homo_sapiens_chr22_reference/star/chrName.txt
      md5sum: 5ae68a67b70976ee95342a7451cb5af1
    - path: output/cellranger/homo_sapiens_chr22_reference/star/chrNameLength.txt
      md5sum: b190587cae0531f3cf25552d8aa674db
    - path: output/cellranger/homo_sapiens_chr22_reference/star/chrStart.txt
      md5sum: bc73df776dd3d5bb9cfcbcba60880519
    - path: output/cellranger/homo_sapiens_chr22_reference/star/exonGeTrInfo.tab
      md5sum: d04497f69d6ef889efd4d34fe63edcc4
    - path: output/cellranger/homo_sapiens_chr22_reference/star/exonInfo.tab
      md5sum: 0d560290fab688b7268d88d5494bf9fe
    - path: output/cellranger/homo_sapiens_chr22_reference/star/geneInfo.tab
      md5sum: 8b608537307443ffaee4927d2b428805
    - path: output/cellranger/homo_sapiens_chr22_reference/star/genomeParameters.txt
    - path: output/cellranger/homo_sapiens_chr22_reference/star/sjdbInfo.txt
      md5sum: 5690ea9d9f09f7ff85b7fd47bd234903
    - path: output/cellranger/homo_sapiens_chr22_reference/star/sjdbList.fromGTF.out.tab
      md5sum: 8760c33e966dad0b39f440301ebbdee4
    - path: output/cellranger/homo_sapiens_chr22_reference/star/sjdbList.out.tab
      md5sum: 9e4f991abbbfeb3935a2bb21b9e258f1
    - path: output/cellranger/homo_sapiens_chr22_reference/star/transcriptInfo.tab
      md5sum: 0c3a5adb49d15e5feff81db8e29f2e36

13
tests/modules/dedup/main.nf
Normal file

@@ -0,0 +1,13 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { DEDUP } from '../../../modules/dedup/main.nf' addParams( options: [args: "-m"] )

workflow test_dedup {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ]

    DEDUP ( input )
}

13
tests/modules/dedup/test.yml
Normal file

@@ -0,0 +1,13 @@
- name: dedup test_dedup
  command: nextflow run tests/modules/dedup -entry test_dedup -c tests/config/nextflow.config
  tags:
    - dedup
  files:
    - path: output/dedup/test.paired_end.dedup.json
      md5sum: 2def0b54aba1fafa21b274f260de1b6f
    - path: output/dedup/test.paired_end.hist
      md5sum: df3492273a1db0d8152e35d9d5e38aa6
    - path: output/dedup/test.paired_end.log
      md5sum: 4b8855bd63b2f4b37da4cfb17e61fb00
    - path: output/dedup/test.paired_end_rmdup.bam
      md5sum: 8b0408fe3e258989095303a47e5b5061

16
tests/modules/fgbio/fastqtobam/main.nf
Normal file

@@ -0,0 +1,16 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2
params.read_structure = "+T 12M11S+T"

include { FGBIO_FASTQTOBAM } from '../../../../modules/fgbio/fastqtobam/main.nf' addParams( options: [:] )

workflow test_fgbio_fastqtobam {

    input = [ [ id:'test', single_end:false ], // meta map
              [ file(params.test_data['homo_sapiens']['illumina']['test_umi_1_fastq_gz'], checkIfExists: true),
                file(params.test_data['homo_sapiens']['illumina']['test_umi_2_fastq_gz'], checkIfExists: true) ]
            ]

    FGBIO_FASTQTOBAM ( input, "${params.read_structure}" )
}

10
tests/modules/fgbio/fastqtobam/test.yml
Normal file

@@ -0,0 +1,10 @@
- name: fgbio fastqtobam test_fgbio_fastqtobam
  command: nextflow run tests/modules/fgbio/fastqtobam -entry test_fgbio_fastqtobam -c tests/config/nextflow.config
  tags:
    - fgbio/fastqtobam
    - fgbio
  files:
    - path: output/fgbio/test_umi_converted.bam
      md5sum: 9510735554e5eff29244077a72075fb6
    - path: output/fgbio/versions.yml
      md5sum: 524815093b96759060d0d800fc6a3f25

36
tests/modules/filtlong/main.nf
Normal file

@@ -0,0 +1,36 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { FILTLONG } from '../../../modules/filtlong/main.nf' addParams( options: [:] )

workflow test_filtlong {

    input = [ [ id:'test', single_end:false ], // meta map
              [],
              [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ]
            ]

    FILTLONG ( input )
}

workflow test_filtlong_illumina_se {

    input = [ [ id:'test', single_end:true ], // meta map
              [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ],
              [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ]
            ]

    FILTLONG ( input )
}

workflow test_filtlong_illumina_pe {

    input = [ [ id:'test', single_end:false ], // meta map
              [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
                file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ],
              [ file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ]
            ]

    FILTLONG ( input )
}

23
tests/modules/filtlong/test.yml
Normal file

@@ -0,0 +1,23 @@
- name: filtlong test_filtlong
  command: nextflow run tests/modules/filtlong -entry test_filtlong -c tests/config/nextflow.config
  tags:
    - filtlong
  files:
    - path: output/filtlong/test_lr_filtlong.fastq.gz
      md5sum: 7029066c27ac6f5ef18d660d5741979a

- name: filtlong test_filtlong_illumina_se
  command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_se -c tests/config/nextflow.config
  tags:
    - filtlong
  files:
    - path: output/filtlong/test_lr_filtlong.fastq.gz
      md5sum: 7029066c27ac6f5ef18d660d5741979a

- name: filtlong test_filtlong_illumina_pe
  command: nextflow run tests/modules/filtlong -entry test_filtlong_illumina_pe -c tests/config/nextflow.config
  tags:
    - filtlong
  files:
    - path: output/filtlong/test_lr_filtlong.fastq.gz
      md5sum: 7029066c27ac6f5ef18d660d5741979a

18
tests/modules/gatk4/estimatelibrarycomplexity/main.nf
Normal file

@@ -0,0 +1,18 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { GATK4_ESTIMATELIBRARYCOMPLEXITY } from '../../../../modules/gatk4/estimatelibrarycomplexity/main.nf' addParams( options: [:] )

workflow test_gatk4_estimatelibrarycomplexity {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)
            ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true)

    GATK4_ESTIMATELIBRARYCOMPLEXITY ( input, fasta, fai, dict )
}

7
tests/modules/gatk4/estimatelibrarycomplexity/test.yml
Normal file

@@ -0,0 +1,7 @@
- name: gatk4 estimatelibrarycomplexity test_gatk4_estimatelibrarycomplexity
  command: nextflow run tests/modules/gatk4/estimatelibrarycomplexity -entry test_gatk4_estimatelibrarycomplexity -c tests/config/nextflow.config
  tags:
    - gatk4/estimatelibrarycomplexity
    - gatk4
  files:
    - path: output/gatk4/test.metrics

35
tests/modules/manta/germline/main.nf
Normal file

@@ -0,0 +1,35 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' addParams( options: [:] )

workflow test_manta_germline {
    input = [
        [ id:'test'], // meta map
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
    ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    bed = []
    bed_tbi = []

    MANTA_GERMLINE ( input, fasta, fai, bed, bed_tbi )
}

workflow test_manta_germline_target_bed {
    input = [
        [ id:'test'], // meta map
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
    ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true)
    bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)

    MANTA_GERMLINE ( input, fasta, fai, bed, bed_tbi )
}

24
tests/modules/manta/germline/test.yml
Normal file

@@ -0,0 +1,24 @@
- name: manta germline
  command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline -c tests/config/nextflow.config
  tags:
    - manta
    - manta/germline
  files:
    - path: output/manta/test.candidate_small_indels.vcf.gz
    - path: output/manta/test.candidate_small_indels.vcf.gz.tbi
    - path: output/manta/test.candidate_sv.vcf.gz
    - path: output/manta/test.candidate_sv.vcf.gz.tbi
    - path: output/manta/test.diploid_sv.vcf.gz
    - path: output/manta/test.diploid_sv.vcf.gz.tbi
- name: manta germline target bed
  command: nextflow run ./tests/modules/manta/germline -entry test_manta_germline_target_bed -c tests/config/nextflow.config
  tags:
    - manta
    - manta/germline
  files:
    - path: output/manta/test.candidate_small_indels.vcf.gz
    - path: output/manta/test.candidate_small_indels.vcf.gz.tbi
    - path: output/manta/test.candidate_sv.vcf.gz
    - path: output/manta/test.candidate_sv.vcf.gz.tbi
    - path: output/manta/test.diploid_sv.vcf.gz
    - path: output/manta/test.diploid_sv.vcf.gz.tbi

23
tests/modules/manta/somatic/main.nf
Normal file

@@ -0,0 +1,23 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { MANTA_SOMATIC } from '../../../../modules/manta/somatic/main.nf' addParams( options: [:] )

workflow test_manta_somatic {

    input = [
        [ id:'test', single_end:false ], // meta map
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
    ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true)
    bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)

    MANTA_SOMATIC ( input, fasta, fai, bed, bed_tbi )
}

18
tests/modules/manta/somatic/test.yml
Normal file

@@ -0,0 +1,18 @@
- name: manta somatic test_manta_somatic
  command: nextflow run tests/modules/manta/somatic -entry test_manta_somatic -c tests/config/nextflow.config
  tags:
    - manta/somatic
    - manta
  files:
    - path: output/manta/test.candidate_small_indels.vcf.gz
    - path: output/manta/test.candidate_small_indels.vcf.gz.tbi
      md5sum: 4cb176febbc8c26d717a6c6e67b9c905
    - path: output/manta/test.candidate_sv.vcf.gz
    - path: output/manta/test.candidate_sv.vcf.gz.tbi
      md5sum: 4cb176febbc8c26d717a6c6e67b9c905
    - path: output/manta/test.diploid_sv.vcf.gz
    - path: output/manta/test.diploid_sv.vcf.gz.tbi
      md5sum: 4cb176febbc8c26d717a6c6e67b9c905
    - path: output/manta/test.somatic_sv.vcf.gz
    - path: output/manta/test.somatic_sv.vcf.gz.tbi
      md5sum: 4cb176febbc8c26d717a6c6e67b9c905

35
tests/modules/manta/tumoronly/main.nf
Normal file

@@ -0,0 +1,35 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { MANTA_TUMORONLY } from '../../../../modules/manta/tumoronly/main.nf' addParams( options: [:] )

workflow test_manta_tumoronly {
    input = [
        [ id:'test'], // meta map
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
    ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    bed = []
    bed_tbi = []

    MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi )
}

workflow test_manta_tumoronly_target_bed {
    input = [
        [ id:'test'], // meta map
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
    ]

    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true)
    bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)

    MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi )
}

24
tests/modules/manta/tumoronly/test.yml
Normal file

@@ -0,0 +1,24 @@
- name: manta tumoronly
  command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly -c tests/config/nextflow.config
  tags:
    - manta
    - manta/tumoronly
  files:
    - path: output/manta/test.candidate_small_indels.vcf.gz
    - path: output/manta/test.candidate_small_indels.vcf.gz.tbi
    - path: output/manta/test.candidate_sv.vcf.gz
    - path: output/manta/test.candidate_sv.vcf.gz.tbi
    - path: output/manta/test.tumor_sv.vcf.gz
    - path: output/manta/test.tumor_sv.vcf.gz.tbi
- name: manta tumoronly target bed
  command: nextflow run ./tests/modules/manta/tumoronly -entry test_manta_tumoronly_target_bed -c tests/config/nextflow.config
  tags:
    - manta
    - manta/tumoronly
  files:
    - path: output/manta/test.candidate_small_indels.vcf.gz
    - path: output/manta/test.candidate_small_indels.vcf.gz.tbi
    - path: output/manta/test.candidate_sv.vcf.gz
    - path: output/manta/test.candidate_sv.vcf.gz.tbi
    - path: output/manta/test.tumor_sv.vcf.gz
    - path: output/manta/test.tumor_sv.vcf.gz.tbi

@@ -6,8 +6,8 @@ include { OPTITYPE } from '../../../modules/optitype/main.nf' addParams( options

 workflow test_optitype {
     input = [ [ id:'test', seq_type:'dna' ], // meta map
-              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_bam'], checkIfExists: true)
+              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_hla'], checkIfExists: true)
             ]

     OPTITYPE ( input )
 }

@@ -3,5 +3,7 @@
   tags:
     - optitype
   files:
-    - path: output/optitype/test/test_result.tsv
     - path: output/optitype/test/test_coverage_plot.pdf
+    - path: output/optitype/test/test_result.tsv
+      contains:
+        - '1446'

15
tests/modules/paraclu/main.nf
Normal file

@@ -0,0 +1,15 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { PARACLU } from '../../../modules/paraclu/main.nf' addParams( options: [:] )

workflow test_paraclu {

    input = [[ id:'test' ], // meta map
             file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)
            ]
    min_cluster = 30

    PARACLU ( input, min_cluster )
}

7
tests/modules/paraclu/test.yml
Normal file

@@ -0,0 +1,7 @@
- name: paraclu test_paraclu
  command: nextflow run tests/modules/paraclu -entry test_paraclu -c tests/config/nextflow.config
  tags:
    - paraclu
  files:
    - path: output/paraclu/test.clustered.simplified.bed
      md5sum: d41d8cd98f00b204e9800998ecf8427e

13
tests/modules/porechop/main.nf
Normal file

@@ -0,0 +1,13 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { PORECHOP } from '../../../modules/porechop/main.nf' addParams( options: [args: '', suffix: '_porechop'] )

workflow test_porechop {

    input = [ [ id:'test', single_end:true ], // meta map
              file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ]

    PORECHOP ( input )
}

7
tests/modules/porechop/test.yml
Normal file

@@ -0,0 +1,7 @@
- name: porechop test_porechop
  command: nextflow run tests/modules/porechop -entry test_porechop -c tests/config/nextflow.config
  tags:
    - porechop
  files:
    - path: output/porechop/test_porechop.fastq.gz
      md5sum: 08f314ae9f162c8dcc27e5b513d2064d

24
tests/modules/samtools/bam2fq/main.nf
Normal file

@@ -0,0 +1,24 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { SAMTOOLS_BAM2FQ } from '../../../../modules/samtools/bam2fq/main.nf' addParams( options: [args: "-T RX"] )

workflow test_samtools_bam2fq_nosplit {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_converted_bam'], checkIfExists: true) ]
    split = false

    SAMTOOLS_BAM2FQ ( input, split )
}


workflow test_samtools_bam2fq_withsplit {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['homo_sapiens']['illumina']['test_paired_end_umi_converted_bam'], checkIfExists: true) ]
    split = true

    SAMTOOLS_BAM2FQ ( input, split )
}

23
tests/modules/samtools/bam2fq/test.yml
Normal file

@@ -0,0 +1,23 @@
- name: samtools bam2fq test_samtools_bam2fq_nosplit
  command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_nosplit -c tests/config/nextflow.config
  tags:
    - samtools/bam2fq
    - samtools
  files:
    - path: output/samtools/test_interleaved.fq.gz
      md5sum: d733e66d29a4b366bf9df8c42f845256

- name: samtools bam2fq test_samtools_bam2fq_withsplit
  command: nextflow run tests/modules/samtools/bam2fq -entry test_samtools_bam2fq_withsplit -c tests/config/nextflow.config
  tags:
    - samtools/bam2fq
    - samtools
  files:
    - path: output/samtools/test_1.fq.gz
      md5sum: 4522edbe158ec4804765794569f67493
    - path: output/samtools/test_2.fq.gz
      md5sum: 7e00ef40d5cfe272b67461381019dcc1
    - path: output/samtools/test_other.fq.gz
      md5sum: 709872fc2910431b1e8b7074bfe38c67
    - path: output/samtools/test_singleton.fq.gz
      md5sum: 709872fc2910431b1e8b7074bfe38c67

13
tests/modules/samtools/depth/main.nf
Normal file

@@ -0,0 +1,13 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { SAMTOOLS_DEPTH } from '../../../../modules/samtools/depth/main.nf' addParams( options: [:] )

workflow test_samtools_depth {

    input = [ [ id:'test', single_end:false ], // meta map
              file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true) ]

    SAMTOOLS_DEPTH ( input )
}

8
tests/modules/samtools/depth/test.yml
Normal file

@@ -0,0 +1,8 @@
- name: samtools depth
  command: nextflow run tests/modules/samtools/depth -entry test_samtools_depth -c tests/config/nextflow.config
  tags:
    - samtools/depth
    - samtools
  files:
    - path: output/samtools/test.tsv
      md5sum: aa27ebf69663ebded553b4d6538219d9

31
tests/modules/seqtk/mergepe/main.nf
Normal file

@@ -0,0 +1,31 @@
#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { SEQTK_MERGEPE } from '../../../../modules/seqtk/mergepe/main.nf' addParams( options: [ 'suffix':'.processed' ] )

//
// Test with single-end data
//

workflow test_seqtk_mergepe_single_end {

    input = [ [ id:'test', single_end:true ], // meta map
              file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ]

    SEQTK_MERGEPE ( input )
}

//
// Test with paired-end data
//

workflow test_seqtk_mergepe_paired_end {

    input = [ [ id:'test', single_end:false ], // meta map
              [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
                file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ]
            ]

    SEQTK_MERGEPE ( input )
}

17
tests/modules/seqtk/mergepe/test.yml
Normal file

@@ -0,0 +1,17 @@
- name: seqtk mergepe test_seqtk_mergepe_single_end
  command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_single_end -c tests/config/nextflow.config
  tags:
    - seqtk/mergepe
    - seqtk
  files:
    - path: output/seqtk/test.processed.fastq.gz
      md5sum: e325ef7deb4023447a1f074e285761af

- name: seqtk mergepe test_seqtk_mergepe_paired_end
  command: nextflow run tests/modules/seqtk/mergepe -entry test_seqtk_mergepe_paired_end -c tests/config/nextflow.config
  tags:
    - seqtk/mergepe
    - seqtk
  files:
    - path: output/seqtk/test.processed.fastq.gz
      md5sum: 3f094ef62d9bfe06aa25174a06bc7d04

@@ -5,30 +5,32 @@ nextflow.enable.dsl = 2
 include { STRELKA_GERMLINE } from '../../../../modules/strelka/germline/main.nf' addParams( options: [:] )

 workflow test_strelka_germline {
     input = [
         [ id:'test'], // meta map
-        file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
-        file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
     ]

-    fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
-    fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
-    targets = []
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
+    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+    target_bed = []
+    target_bed_tbi = []

-    STRELKA_GERMLINE ( input, fasta, fai, targets )
+    STRELKA_GERMLINE ( input, fasta, fai, target_bed, target_bed_tbi )
 }

 workflow test_strelka_germline_target_bed {
     input = [
         [ id:'test'], // meta map
-        file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true),
-        file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
     ]

-    fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
-    fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
-    targets = file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
+    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+    target_bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true)
+    target_bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)

-    STRELKA_GERMLINE ( input, fasta, fai, targets )
+    STRELKA_GERMLINE ( input, fasta, fai, target_bed, target_bed_tbi )
 }

@@ -1,20 +1,21 @@
-- name: strelka germline
-  command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config
+- name: strelka germline test_strelka_germline
+  command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline -c tests/config/nextflow.config
   tags:
     - strelka
     - strelka/germline
   files:
-    - path: output/strelka/test.variants.vcf.gz
-    - path: output/strelka/test.variants.vcf.gz.tbi
     - path: output/strelka/test.genome.vcf.gz
     - path: output/strelka/test.genome.vcf.gz.tbi
-- name: strelka germline target bed
-  command: nextflow run ./tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config
+    - path: output/strelka/test.variants.vcf.gz
+    - path: output/strelka/test.variants.vcf.gz.tbi
+
+- name: strelka germline test_strelka_germline_target_bed
+  command: nextflow run tests/modules/strelka/germline -entry test_strelka_germline_target_bed -c tests/config/nextflow.config
   tags:
     - strelka
     - strelka/germline
   files:
-    - path: output/strelka/test.variants.vcf.gz
-    - path: output/strelka/test.variants.vcf.gz.tbi
     - path: output/strelka/test.genome.vcf.gz
     - path: output/strelka/test.genome.vcf.gz.tbi
+    - path: output/strelka/test.variants.vcf.gz
+    - path: output/strelka/test.variants.vcf.gz.tbi

@@ -11,7 +11,28 @@ workflow test_strelka_somatic {
         file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
         file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
         file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
-        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)
+        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
+        [],[]
+    ]
+
+    fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)
+    fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true)
+    bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true)
+    bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true)
+
+    STRELKA_SOMATIC (input, fasta, fai, bed, bed_tbi )
+}
+
+workflow test_strelka__best_practices_somatic {
+
+    input = [
+        [ id:'test', single_end:false ], // meta map
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true),
+        file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true)
     ]

     fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true)

@@ -10,3 +10,16 @@
     - path: output/strelka/test.somatic_snvs.vcf.gz
     - path: output/strelka/test.somatic_snvs.vcf.gz.tbi
       md5sum: 4cb176febbc8c26d717a6c6e67b9c905
+
+- name: strelka somatic test_strelka__best_practices_somatic
+  command: nextflow run tests/modules/strelka/somatic -entry test_strelka__best_practices_somatic -c tests/config/nextflow.config
+  tags:
+    - strelka
+    - strelka/somatic
+  files:
+    - path: output/strelka/test.somatic_indels.vcf.gz
+    - path: output/strelka/test.somatic_indels.vcf.gz.tbi
+      md5sum: 4cb176febbc8c26d717a6c6e67b9c905
+    - path: output/strelka/test.somatic_snvs.vcf.gz
+    - path: output/strelka/test.somatic_snvs.vcf.gz.tbi
+      md5sum: 4cb176febbc8c26d717a6c6e67b9c905