//
// Source: mirror of https://github.com/MillironX/taxprofiler.git (synced 2024-11-10 23:23:09 +00:00)
// File:   taxprofiler/subworkflows/local/db_check.nf (53 lines, 1.5 KiB)
//
//
// Check input samplesheet and get read channels
//
include { DATABASE_CHECK } from '../../modules/local/database_check'
include { UNTAR } from '../../modules/nf-core/modules/untar/main'
//
// Validate the database sheet, decompress any .tar.gz databases, and emit
// a channel of [ val(meta), db ] pairs ready for the profiling tools.
//
workflow DB_CHECK {
    take:
    dbsheet // file: /path/to/dbsheet.csv

    main:
    // TODO: make database sheet check
    // Parse the validated CSV into per-database [ meta, db_path ] tuples.
    parsed_samplesheet = DATABASE_CHECK ( dbsheet )
        .csv
        .splitCsv ( header:true, sep:',' )
        .dump(tag: "db_split_csv_out")
        .map { create_db_channels(it) }
        .dump(tag: "db_channel_prepped")

    // Route tarballed databases to UNTAR; everything else passes through.
    ch_dbs_for_untar = parsed_samplesheet
        .branch {
            untar: it[1].toString().endsWith(".tar.gz")
            skip:  true
        }

    // TODO Filter to only run UNTAR on DBs of tools actually using?
    // TODO make optional whether to save
    UNTAR ( ch_dbs_for_untar.untar )

    // Recombine the decompressed databases with those that needed no untarring.
    ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )

    emit:
    dbs      = ch_final_dbs               // channel: [ val(meta), [ db ] ]
    versions = DATABASE_CHECK.out.versions // channel: [ versions.yml ]
}
//
// Build a [ meta, db_file ] tuple from one row of the database sheet.
// Expects the row to carry the keys: tool, db_name, db_params, db_path.
// Exits the pipeline if db_path does not exist on disk.
//
def create_db_channels(LinkedHashMap row) {
    def meta = [:]
    meta.tool      = row.tool
    meta.db_name   = row.db_name
    meta.db_params = row.db_params

    // Fail fast with a clear message if the database path is missing.
    if (!file(row.db_path, type: 'dir').exists()) {
        exit 1, "ERROR: Please check input samplesheet -> database could not be found!\n${row.db_path}"
    }

    return [ meta, file(row.db_path) ]
}