Merge branch 'master' into busco

commit a9f23bb477
Author: Mahesh Binzer-Panchal
Date:   2022-05-06 13:49:01 +02:00 (committed by GitHub)
17 changed files with 235 additions and 23 deletions

@@ -20,7 +20,6 @@ process CNVPYTOR_CALLCNVS {
script:
def bins = bin_sizes ?: '1000'
def prefix = task.ext.prefix ?: "${meta.id}"
"""
cnvpytor \\
-root $pytor \\

@@ -8,7 +8,7 @@ process CNVPYTOR_VIEW {
'quay.io/biocontainers/cnvpytor:1.2.1--pyhdfd78af_0' }"
input:
tuple val(meta), path(pytor)
tuple val(meta), path(pytor_files)
val bin_sizes
val output_format
@@ -23,17 +23,18 @@ process CNVPYTOR_VIEW {
script:
def output_suffix = output_format ?: 'vcf'
def bins = bin_sizes ?: '1000'
def bins = bin_sizes ?: '1000'
def input = pytor_files.join(" ")
def prefix = task.ext.prefix ?: "${meta.id}"
"""
python3 <<CODE
import cnvpytor,os
from pathlib import Path
pytor_file = Path("$pytor")
binsizes = "${bins}".split(" ")
for binsize in binsizes:
app = cnvpytor.Viewer(["$pytor"], params={} )
outputfile = "{}_{}.{}".format(pytor_file.stem,binsize.strip(),"${output_suffix}")
file_list = "${input}".split(" ")
app = cnvpytor.Viewer(file_list, params={} )
outputfile = "{}_{}.{}".format("${prefix}",binsize.strip(),"${output_suffix}")
app.print_filename = outputfile
app.bin_size = int(binsize)
app.print_calls_file()
@@ -47,8 +48,9 @@ process CNVPYTOR_VIEW {
stub:
def output_suffix = output_format ?: 'vcf'
def prefix = task.ext.prefix ?: "${meta.id}"
"""
touch ${pytor.baseName}.${output_suffix}
touch ${prefix}.${output_suffix}
cat <<-END_VERSIONS > versions.yml
"${task.process}":

@@ -17,9 +17,9 @@ input:
description: |
Groovy Map containing sample information
e.g. [ id:'test' ]
- pytor:
- pytor_files:
type: file
description: pytor file containing read depth data
description: pytor file containing CNV calls. To merge calls from multiple samples, provide a list of files.
pattern: "*.{pytor}"
- bin_sizes:
type: string

modules/md5sum/main.nf (new file)

@@ -0,0 +1,35 @@
process MD5SUM {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "conda-forge::coreutils=9.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/ubuntu:20.04' :
'ubuntu:20.04' }"
input:
tuple val(meta), path(file)
output:
tuple val(meta), path("*.md5"), emit: checksum
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
md5sum \\
$args \\
${file} \\
> ${file}.md5
cat <<-END_VERSIONS > versions.yml
"${task.process}":
md5sum: \$(echo \$(md5sum --version 2>&1 | head -n 1| sed 's/^.*) //;' ))
END_VERSIONS
"""
}
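A small wiring sketch (illustrative, not from this commit) showing how the named checksum and versions emits might be consumed; the channel contents and include path are assumptions.

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { MD5SUM } from './modules/md5sum/main'

workflow {
    // Hypothetical input channel: [ meta, bam ]
    ch_bam = Channel.of(
        [ [ id:'test', single_end:false ], file('test.paired_end.bam') ]
    )

    MD5SUM ( ch_bam )

    MD5SUM.out.checksum.view()                        // -> [ [id:'test', ...], test.paired_end.bam.md5 ]
    ch_versions = Channel.empty().mix( MD5SUM.out.versions )
}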

modules/md5sum/meta.yml (new file)

@@ -0,0 +1,39 @@
name: "md5sum"
description: Create an MD5 (128-bit) checksum
keywords:
- checksum
tools:
- "md5sum":
description: Create an MD5 (128-bit) checksum
homepage: "https://www.gnu.org"
documentation: "https://man7.org/linux/man-pages/man1/md5sum.1.html"
licence: GPLv3+
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- file:
type: file
description: Any file
pattern: "*.*"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- checksum:
type: file
description: File containing checksum
pattern: "*.md5"
authors:
- "@matthdsm"

modules/shasum/main.nf (new file)

@@ -0,0 +1,35 @@
process SHASUM {
tag "$meta.id"
label 'process_low'
conda (params.enable_conda ? "conda-forge::coreutils=9.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/ubuntu:20.04' :
'ubuntu:20.04' }"
input:
tuple val(meta), path(file)
output:
tuple val(meta), path("*.sha256"), emit: checksum
path "versions.yml" , emit: versions
when:
task.ext.when == null || task.ext.when
script:
def args = task.ext.args ?: ''
def prefix = task.ext.prefix ?: "${meta.id}"
"""
sha256sum \\
$args \\
${file} \\
> ${file}.sha256
cat <<-END_VERSIONS > versions.yml
"${task.process}":
sha256sum: \$(echo \$(sha256sum --version 2>&1 | head -n 1| sed 's/^.*) //;' ))
END_VERSIONS
"""
}

modules/shasum/meta.yml (new file)

@@ -0,0 +1,40 @@
name: "shasum"
description: Print SHA256 (256-bit) checksums.
keywords:
- checksum
- sha256
tools:
- "md5sum":
description: Create an SHA256 (256-bit) checksum.
homepage: "https://www.gnu.org"
documentation: "https://linux.die.net/man/1/shasum"
licence: GPLv3+
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- file:
type: file
description: Any file
pattern: "*.*"
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- versions:
type: file
description: File containing software versions
pattern: "versions.yml"
- checksum:
type: file
description: File containing checksum
pattern: "*.sha256"
authors:
- "@matthdsm"

@@ -2,10 +2,10 @@ process SVDB_MERGE {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null)
conda (params.enable_conda ? "bioconda::svdb=2.6.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0':
'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }"
'https://depot.galaxyproject.org/singularity/svdb:2.6.1--py39h5371cbf_0':
'quay.io/biocontainers/svdb:2.6.1--py39h5371cbf_0' }"
input:
tuple val(meta), path(vcfs)

@@ -2,10 +2,10 @@ process SVDB_QUERY {
tag "$meta.id"
label 'process_medium'
conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null)
conda (params.enable_conda ? "bioconda::svdb=2.6.1" : null)
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0':
'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }"
'https://depot.galaxyproject.org/singularity/svdb:2.6.1--py39h5371cbf_0':
'quay.io/biocontainers/svdb:2.6.1--py39h5371cbf_0' }"
input:
tuple val(meta), path(vcf)

@@ -1062,10 +1062,6 @@ krona/kronadb:
- modules/krona/kronadb/**
- tests/modules/krona/kronadb/**
krona/ktupdatetaxonomy:
- modules/krona/ktupdatetaxonomy/**
- tests/modules/krona/ktupdatetaxonomy/**
krona/ktimporttaxonomy:
- modules/krona/ktimporttaxonomy/**
- tests/modules/krona/ktimporttaxonomy/**
@@ -1074,6 +1070,10 @@ krona/ktimporttext:
- modules/krona/ktimporttext/**
- tests/modules/krona/ktimporttext/**
krona/ktupdatetaxonomy:
- modules/krona/ktupdatetaxonomy/**
- tests/modules/krona/ktupdatetaxonomy/**
last/dotplot:
- modules/last/dotplot/**
- tests/modules/last/dotplot/**
@@ -1194,6 +1194,10 @@ maxbin2:
- modules/maxbin2/**
- tests/modules/maxbin2/**
md5sum:
- modules/md5sum/**
- tests/modules/md5sum/**
medaka:
- modules/medaka/**
- tests/modules/medaka/**
@@ -1739,6 +1743,10 @@ seqwish/induce:
- modules/seqwish/induce/**
- tests/modules/seqwish/induce/**
shasum:
- modules/shasum/**
- tests/modules/shasum/**
shigatyper:
- modules/shigatyper/**
- tests/modules/shigatyper/**

@@ -8,7 +8,7 @@ workflow test_cnvpytor_view {
input = [
[ id:'test'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)
[file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)]
]
bin_sizes = "10000 100000"
@@ -20,7 +20,7 @@ workflow test_cnvpytor_view_tsvout {
input = [
[ id:'test'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)
[file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)]
]
output_suffix = "tsv"
@@ -32,7 +32,7 @@ workflow test_cnvpytor_view_stub {
input = [
[ id:'test'], // meta map
file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)
[file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true)]
]
bin_sizes = []

@@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { MD5SUM } from '../../../modules/md5sum/main.nf'
workflow test_md5sum {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
]
MD5SUM ( input )
}

@@ -0,0 +1,3 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}
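For reference, a short Groovy illustration of what the publishDir closure above evaluates to; the fully qualified process name used here is an assumption for the md5sum test.

// task.process is the fully qualified name, e.g. 'TEST_MD5SUM:MD5SUM' (assumed for illustration)
def name = 'TEST_MD5SUM:MD5SUM'
def dir  = name.tokenize(':')[-1].tokenize('_')[0].toLowerCase()
// tokenize(':')[-1]  -> 'MD5SUM'   (last workflow:process component)
// tokenize('_')[0]   -> 'MD5SUM'   (part before the first '_'; e.g. 'CNVPYTOR' for 'CNVPYTOR_VIEW')
// toLowerCase()      -> 'md5sum'
assert dir == 'md5sum'
// so outputs publish to "${params.outdir}/md5sum", matching the output/md5sum/ paths in the test file that follows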

@@ -0,0 +1,8 @@
- name: md5sum test_md5sum
command: nextflow run tests/modules/md5sum -entry test_md5sum -c tests/config/nextflow.config
tags:
- md5sum
files:
- path: output/md5sum/test.paired_end.bam.md5
md5sum: 1163095be8fdfb2acb3cc6c027389c4b
- path: output/md5sum/versions.yml

@@ -0,0 +1,15 @@
#!/usr/bin/env nextflow
nextflow.enable.dsl = 2
include { SHASUM } from '../../../modules/shasum/main.nf'
workflow test_shasum {
input = [
[ id:'test', single_end:false ], // meta map
file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true)
]
SHASUM ( input )
}

@@ -0,0 +1,5 @@
process {
publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }
}

@@ -0,0 +1,8 @@
- name: shasum test_shasum
command: nextflow run tests/modules/shasum -entry test_shasum -c tests/config/nextflow.config
tags:
- shasum
files:
- path: output/shasum/test.paired_end.bam.sha256
md5sum: 138a19e100f09fc975ea1b717da9b6dd
- path: output/shasum/versions.yml