// taxprofiler/subworkflows/local/db_check.nf

//
// Check input database sheet and get database channels
//
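
// The database sheet is a CSV with one row per database and the columns read by
// create_db_channels() below: tool, db_name, db_params, db_path.
// A minimal illustrative sheet (tool names and paths are hypothetical examples):
//
//   tool,db_name,db_params,db_path
//   kraken2,k2_standard,,/path/to/kraken2_db.tar.gz
//   centrifuge,p_compressed,,/path/to/centrifuge_db/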

include { DATABASE_CHECK } from '../../modules/local/database_check'
include { UNTAR          } from '../../modules/nf-core/untar/main'

workflow DB_CHECK {
    take:
    dbsheet // file: /path/to/dbsheet.csv

    main:

    // TODO: make database sheet check
    // Checks:
    // 1) no duplicates,
    // 2) args do not have quotes, e.g. just `,,` and NOT `,"",`
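
    // A possible sketch of those checks on the raw sheet, for illustration only
    // (any real validation would presumably live in the DATABASE_CHECK module;
    // this assumes the column order tool,db_name,db_params,db_path):
    //
    //   def rows = file(dbsheet).readLines().drop(1)*.split(',', -1)
    //   def keys = rows.collect { "${it[0]}-${it[1]}" }   // tool + db_name
    //   if ( keys.size() != keys.toUnique().size() )
    //       exit 1, "ERROR: duplicate tool/db_name combination in the database sheet"
    //   if ( rows.any { it.size() > 2 && it[2].contains('"') } )
    //       exit 1, "ERROR: db_params must not be quoted (use `,,` not `,\"\",`)"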

    parsed_samplesheet = DATABASE_CHECK ( dbsheet )
        .csv
        .splitCsv ( header:true, sep:',' )
        .map { create_db_channels(it) }

    ch_dbs_for_untar = parsed_samplesheet
        .branch {
            untar: it[1].toString().endsWith(".tar.gz")
            skip: true
        }
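
    // `untar` carries databases supplied as .tar.gz archives, which are sent to
    // UNTAR below; `skip` carries everything else (e.g. databases already given
    // as directories), which passes through unchanged.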

    // TODO Filter to only run UNTAR on DBs of tools actually using?
    // TODO make optional whether to save
    UNTAR ( ch_dbs_for_untar.untar )
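
    // Recombine the freshly extracted databases with those that needed no
    // extraction, giving a single [ meta, db ] channel for downstream use.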
    ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )

    emit:
    dbs      = ch_final_dbs // channel: [ val(meta), [ db ] ]
    versions = DATABASE_CHECK.out.versions.mix(UNTAR.out.versions.first()) // channel: [ versions.yml ]
}

def create_db_channels(LinkedHashMap row) {
    def meta = [:]
    meta.tool      = row.tool
    meta.db_name   = row.db_name
    meta.db_params = row.db_params

    def array = []
    if (!file(row.db_path, type: 'dir').exists()) {
        exit 1, "ERROR: Please check input database sheet -> database could not be found!\n${row.db_path}"
    }
    array = [ meta, file(row.db_path) ]

    return array
}
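
// For illustration (values are hypothetical): a row such as
//   [tool:'kraken2', db_name:'k2_standard', db_params:'', db_path:'/dbs/k2_standard']
// is turned into the tuple
//   [ [tool:'kraken2', db_name:'k2_standard', db_params:''], /dbs/k2_standard ]
// which is the shape carried by the `dbs` channel emitted above (for `.tar.gz`
// inputs the path is the directory extracted by UNTAR instead).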