
Start work on Groovy-native database checks

James Fellows Yates 2022-12-12 10:02:22 +01:00
parent 13c4f44ca6
commit 07bd989bc6
4 changed files with 30 additions and 44 deletions
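In short, the commit drops the Python-backed DATABASE_CHECK module (and its publishDir entry in the pipeline config) and moves database-sheet parsing and validation into plain Groovy inside the DB_CHECK subworkflow. A condensed sketch of the new wiring, lifted from the subworkflow diff below (validate_db_sheet and create_db_channels are the helpers defined there; dbsheet is the subworkflow input):

    // Condensed sketch of the new approach (full diff below): read the database
    // sheet as a plain CSV channel and run the Groovy-native checks on every row
    // before turning it into the downstream channel tuples.
    parsed_samplesheet = Channel.fromPath(dbsheet)
        .splitCsv ( header:true, sep:',' )
        .map {
            validate_db_sheet(it)   // row-level checks, replacing the old python process
            create_db_channels(it)  // last expression of the closure becomes the mapped value
        }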

View file

@@ -12,14 +12,6 @@
 process {
-    withName: DATABASE_CHECK {
-        publishDir = [
-            path: { "${params.outdir}/pipeline_info" },
-            mode: params.publish_dir_mode,
-            saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
-        ]
-    }
-
     withName: FASTQC {
         ext.args = '--quiet'
         ext.prefix = { "${meta.id}_${meta.run_accession}_raw" }

View file

@@ -1,29 +0,0 @@
-process DATABASE_CHECK {
-    tag "$databasesheet"
-    label 'process_single'
-
-    conda (params.enable_conda ? "conda-forge::python=3.8.3" : null)
-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://depot.galaxyproject.org/singularity/python:3.8.3' :
-        'quay.io/biocontainers/python:3.8.3' }"
-
-    input:
-    path databasesheet
-
-    output:
-    path '*.csv'       , emit: csv
-    path "versions.yml", emit: versions
-
-    when:
-    task.ext.when == null || task.ext.when
-
-    script: // This script is bundled with the pipeline, in nf-core/taxprofiler/bin/
-    """
-    cat $databasesheet >> database_sheet.valid.csv
-
-    cat <<-END_VERSIONS > versions.yml
-    "${task.process}":
-        python: \$(python --version | sed 's/Python //g')
-    END_VERSIONS
-    """
-}

View file

@@ -2,7 +2,6 @@
 // Check input samplesheet and get read channels
 //
 
-include { DATABASE_CHECK } from '../../modules/local/database_check'
 include { UNTAR } from '../../modules/nf-core/untar/main'
 
 workflow DB_CHECK {
@@ -10,15 +9,18 @@ workflow DB_CHECK {
     dbsheet // file: /path/to/dbsheet.csv
 
     main:
+    ch_versions = Channel.empty()
+
     // TODO: make database sheet check
     // Checks:
     // 1) no duplicates,
-    // 2) args do not have quotes, e.g. just `,,` and NOT `,"",`
-    parsed_samplesheet = DATABASE_CHECK ( dbsheet )
-        .csv
+    parsed_samplesheet = Channel.fromPath(dbsheet)
         .splitCsv ( header:true, sep:',' )
-        .map { create_db_channels(it) }
+        .map {
+            validate_db_sheet(it)
+            create_db_channels(it)
+        }
 
     ch_dbs_for_untar = parsed_samplesheet
         .branch {
@@ -29,12 +31,31 @@ workflow DB_CHECK {
     // TODO Filter to only run UNTAR on DBs of tools actually using?
     // TODO make optional whether to save
     UNTAR ( ch_dbs_for_untar.untar )
+    ch_versions = ch_versions.mix(UNTAR.out.versions.first())
 
     ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )
 
     emit:
     dbs      = ch_final_dbs // channel: [ val(meta), [ db ] ]
-    versions = DATABASE_CHECK.out.versions.mix(UNTAR.out.versions.first()) // channel: [ versions.yml ]
+    versions = ch_versions  // channel: [ versions.yml ]
+}
+
+def validate_db_sheet(LinkedHashMap row){
+
+    // check minimum number of columns
+    if (row.size() < 4) exit 1, "[nf-core/taxprofiler] error: Invalid database input sheet - malformed row (e.g. missing column). See documentation for more information. Error in: ${row}"
+
+    // all expected columns are there
+    def expected_headers = ['tool', 'db_name', 'db_params', 'db_path']
+    if ( !row.keySet().containsAll(expected_headers) ) exit 1, "[nf-core/taxprofiler] error: Invalid database input sheet - malformed column names. Please check input CSV. Column names should be: ${expected_headers.join(", ")}"
+
+    // valid tools specified // TODO: finish list and check row.tool against it
+    def expected_tools = [ "bracken", "centrifuge", "diamond", "kaiju", "kraken2", "malt", "metaphlan3" ]
+
+    // detect quotes in params
+    if ( row.db_params.contains('"') ) exit 1, "[nf-core/taxprofiler] error: Invalid database db_params entry. No quotes allowed. Error in: ${row}"
+    if ( row.db_params.contains("'") ) exit 1, "[nf-core/taxprofiler] error: Invalid database db_params entry. No quotes allowed. Error in: ${row}"
+
 }
 
 def create_db_channels(LinkedHashMap row) {
def create_db_channels(LinkedHashMap row) { def create_db_channels(LinkedHashMap row) {
@@ -51,3 +72,5 @@ def create_db_channels(LinkedHashMap row) {
     return array
 }
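As a quick sanity check of what the new row validation expects, here is a standalone Groovy sketch that mirrors the checks in validate_db_sheet; the row values (kraken2, k2_demo, the path) are made up purely for illustration:

    // Hypothetical row as splitCsv(header:true, sep:',') would produce it;
    // all values below are illustrative, not real databases.
    def row = [ tool: 'kraken2', db_name: 'k2_demo', db_params: '', db_path: '/path/to/k2_demo.tar.gz' ]

    def expected_headers = ['tool', 'db_name', 'db_params', 'db_path']
    assert row.size() >= 4                                                // minimum number of columns
    assert row.keySet().containsAll(expected_headers)                     // all expected columns present
    assert !row.db_params.contains('"') && !row.db_params.contains("'")   // no quotes in db_params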

View file

@@ -25,7 +25,7 @@ if ( params.input ) {
     exit 1, "Input samplesheet, or PEP config and base directory not specified"
 }
 
-if (params.databases) { ch_databases = file(params.databases) } else { exit 1, 'Input database sheet not specified!' }
+if (params.databases) { ch_databases = file(params.databases, checkIfExists: true) } else { exit 1, 'Input database sheet not specified!' }
 
 if (params.shortread_qc_mergepairs && params.run_malt ) log.warn "[nf-core/taxprofiler] MALT does not accept uncollapsed paired-reads. Pairs will be profiled as separate files."
 if (params.shortread_qc_includeunmerged && !params.shortread_qc_mergepairs) exit 1, "ERROR: [nf-core/taxprofiler] cannot include unmerged reads when merging is not turned on. Please specify --shortread_qc_mergepairs"
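The last file only gains checkIfExists: true on the file() call, so a missing --databases path now aborts the run at launch instead of surfacing later inside DB_CHECK. A tiny illustration with a made-up path:

    // With checkIfExists: true, Nextflow raises an error at pipeline parse time
    // if the given path does not exist (path below is hypothetical).
    ch_databases = file('/no/such/databases.csv', checkIfExists: true)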