//
// Check the input database sheet and create database channels
//
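// The database sheet is a CSV with one row per database. A minimal illustrative
// example (hypothetical database names and paths, not taken from any real config):
//
//   tool,db_name,db_params,db_path
//   kraken2,k2_standard,--quick,/path/to/k2_standard.tar.gz
//   malt,malt_nt,-id 90,/path/to/malt_db/
//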
include { UNTAR } from '../../modules/nf-core/untar/main'

workflow DB_CHECK {
    take:
    dbsheet // file: /path/to/dbsheet.csv

    main:
    ch_versions = Channel.empty()
    // Special check for duplicates _between_ rows, for which we must group rows together
    // Note: this runs in parallel with the within-row validity checks, but we can assume it runs faster and thus fails first
    Channel.fromPath(dbsheet)
        .splitCsv ( header:true, sep:',' )
        .map { [it.tool, it.db_name] }
        .groupTuple()
        .map {
            tool, db_name ->
                def unique_names = db_name.unique(false)
                if ( unique_names.size() < db_name.size() ) exit 1, "[nf-core/taxprofiler] ERROR: Each database for a tool must have a unique name, duplicate detected. Tool: ${tool}, Database names: ${unique_names}"
        }
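
    // Worked example of the duplicate check above: rows (kraken2, db1), (kraken2, db1)
    // and (malt, db2) group to [ kraken2, [db1, db1] ] and [ malt, [db2] ]; for kraken2,
    // unique(false) gives [db1] (size 1 < 2), so the duplicate-name error fires.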
    // Normal checks for within-row validity, so these can be kept in separate functions
    parsed_samplesheet = Channel.fromPath(dbsheet)
        .splitCsv ( header:true, sep:',' )
        .map {
            validate_db_rows(it)
            create_db_channels(it)
        }
    ch_dbs_for_untar = parsed_samplesheet
        .branch {
            untar: it[1].toString().endsWith(".tar.gz")
            skip: true
        }
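
    // Routing example for the branch above (hypothetical entries): a tuple
    // [ [tool:'kraken2', ...], /path/db.tar.gz ] goes to 'untar', while
    // [ [tool:'malt', ...], /path/malt_db/ ] falls through to 'skip'.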
    // TODO: filter to only run UNTAR on databases of tools that are actually in use?
    // TODO: make saving of the extracted databases optional
    UNTAR ( ch_dbs_for_untar.untar )
    ch_versions = ch_versions.mix( UNTAR.out.versions.first() )
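
    // The nf-core UNTAR module consumes [ meta, archive ] tuples and emits the
    // extracted directory on UNTAR.out.untar as [ meta, dir ], which is what
    // allows the mix with the skipped (already-extracted) databases below.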
    ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )

    emit:
    dbs      = ch_final_dbs // channel: [ val(meta), [ db ] ]
    versions = ch_versions  // channel: [ versions.yml ]
}
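
// Usage sketch (illustrative only; the real invocation lives in the main
// taxprofiler workflow, and `params.databases` is an assumed parameter name here):
//
//   include { DB_CHECK } from './subworkflows/local/db_check'
//
//   workflow {
//       DB_CHECK ( file(params.databases) )
//       DB_CHECK.out.dbs.view()
//   }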

def validate_db_rows(LinkedHashMap row){
    // Check minimum number of columns
    if (row.size() < 4) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database input sheet - malformed row (e.g. missing column). See documentation for more information. Error in: ${row}"

    // Check that all expected columns are present
    def expected_headers = ['tool', 'db_name', 'db_params', 'db_path']
    if ( !row.keySet().containsAll(expected_headers) ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database input sheet - malformed column names. Please check the input CSV. Column names should be: ${expected_headers.join(", ")}"

    // Check that a valid (supported) tool is specified
    def expected_tools = [ "bracken", "centrifuge", "diamond", "kaiju", "kraken2", "krakenuniq", "malt", "metaphlan3", "motus" ]
    if ( !expected_tools.contains(row.tool) ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid tool name. Please see documentation for all supported profilers. Error in: ${row}"

    // Detect quotes in db_params, which are not allowed
    if ( row.db_params.contains('"') ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database db_params entry. No quotes allowed. Error in: ${row}"
    if ( row.db_params.contains("'") ) exit 1, "[nf-core/taxprofiler] ERROR: Invalid database db_params entry. No quotes allowed. Error in: ${row}"
}
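
// Example of a row map that passes all checks above (hypothetical values):
//   [tool:'kraken2', db_name:'k2_standard', db_params:'--quick', db_path:'/path/to/db']
// A row with tool:'bowtie2' would fail the expected_tools check, and one with
// db_params:'"--quick"' would fail the quote check.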

def create_db_channels(LinkedHashMap row) {
    // Build the meta map describing this database
    def meta = [:]
    meta.tool      = row.tool
    meta.db_name   = row.db_name
    meta.db_params = row.db_params

    // Check that the database path exists before emitting it
    if (!file(row.db_path, type: 'dir').exists()) {
        exit 1, "[nf-core/taxprofiler] ERROR: Please check input samplesheet -> database path could not be found!\n${row.db_path}"
    }

    return [ meta, file(row.db_path) ]
}
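
// Worked example for create_db_channels (hypothetical row): the input
//   [tool:'malt', db_name:'malt_nt', db_params:'-id 90', db_path:'/path/to/malt_db']
// returns
//   [ [tool:'malt', db_name:'malt_nt', db_params:'-id 90'], /path/to/malt_db ]
// provided /path/to/malt_db exists as a directory.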