Mirror of https://github.com/MillironX/taxprofiler.git, synced 2024-11-10 23:13:09 +00:00

Merge branch 'dev' into allow-targz-motus
Commit c765b3f0bc
4 changed files with 7 additions and 3 deletions
@@ -230,7 +230,7 @@ process {
             path: { "${params.outdir}/minimap2/index" },
             mode: params.publish_dir_mode,
             enabled: params.save_hostremoval_index,
-            pattern: 'minimap2'
+            pattern: '*.mmi'
         ]
     }
 
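The only functional change in this hunk is the publishDir glob: instead of publishing a directory literally named minimap2, the configuration now publishes the .mmi index file produced for host removal. As a reminder of how the option behaves, here is a minimal sketch of the surrounding configuration block; the withName: MINIMAP2_INDEX selector and the comments are assumptions for illustration, since the hunk itself only shows the publishDir map.

    process {
        withName: MINIMAP2_INDEX {
            publishDir = [
                // Destination directory under --outdir
                path: { "${params.outdir}/minimap2/index" },
                // How files are published (copy, symlink, ...), from the pipeline parameter
                mode: params.publish_dir_mode,
                // Only publish the index when the user asks for it
                enabled: params.save_hostremoval_index,
                // Glob matched against task outputs: '*.mmi' publishes the minimap2
                // index file itself rather than a wrapping 'minimap2' directory
                pattern: '*.mmi'
            ]
        }
    }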
@@ -64,6 +64,8 @@ ERR3201952,ERR3201952,OXFORD_NANOPORE,/<path>/<to>/fastq/ERR3201952.fastq.gz,,
 
 > ⚠️ Input FASTQ and FASTA files _must_ be gzipped
 
+> ⚠️ While one can include both short-read and long-read data in one run, we recommend that you split these across _two_ pipeline runs and database sheets (see below). This will allow classification optimisation for each data type, and make MultiQC run-reports more readable (due to run statistics having very large number differences).
+
 | Column | Description |
 | --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
 | `sample` | Unique sample name [required]. |
 
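The note added here is a workflow recommendation rather than code, but in practice it amounts to invoking the pipeline twice, once per sequencing platform, each time with its own samplesheet and database sheet. A hedged illustration follows; the file names and the docker profile are placeholders, and any per-tool --run_* flags you normally pass are omitted.

    # Hypothetical file names; adapt the profile and profiler flags to your setup.
    nextflow run nf-core/taxprofiler -profile docker \
        --input samplesheet_illumina.csv \
        --databases databases_shortread.csv \
        --outdir results_illumina

    nextflow run nf-core/taxprofiler -profile docker \
        --input samplesheet_nanopore.csv \
        --databases databases_longread.csv \
        --outdir results_nanopore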
@@ -44,6 +44,7 @@ workflow DB_CHECK {
         .filter {
             params["run_${it[0]['tool']}"]
         }
+
     UNTAR (ch_input_untar)
     ch_versions = ch_versions.mix(UNTAR.out.versions.first())
     ch_final_dbs = ch_dbs_for_untar.skip.mix( UNTAR.out.untar )
 
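For context, the lines above implement a branch → filter → untar → mix pattern: database archives ending in .tar.gz are split off, only those belonging to profilers the user actually enabled (params["run_<tool>"]) are decompressed, and the results are merged back with databases that were already plain directories. The following channel-only sketch reproduces that logic with hypothetical tools and paths; a .map stands in for the real UNTAR module so the example is self-contained.

    nextflow.enable.dsl = 2

    // Hypothetical switches mirroring the pipeline's per-tool run flags
    params.run_kraken2    = true
    params.run_metaphlan3 = false

    workflow {
        // Hypothetical database sheet entries: [ meta, database path ]
        ch_dbs = Channel.of(
            [ [tool: 'kraken2'],    '/path/to/kraken2_db.tar.gz'    ],
            [ [tool: 'metaphlan3'], '/path/to/metaphlan3_db.tar.gz' ],
            [ [tool: 'centrifuge'], '/path/to/centrifuge_db'        ]
        )

        // Split archives from databases that are already directories
        ch_dbs_for_untar = ch_dbs.branch {
            untar: it[1].toString().endsWith('.tar.gz')
            skip:  true
        }

        // Decompress only archives whose profiler is enabled ...
        ch_untarred = ch_dbs_for_untar.untar
            .filter { params["run_${it[0]['tool']}"] }
            .map { meta, db -> [ meta, db - ~/\.tar\.gz$/ ] }   // stand-in for UNTAR

        // ... and merge them back with everything that skipped decompression
        ch_final_dbs = ch_dbs_for_untar.skip.mix( ch_untarred )
        ch_final_dbs.view()
    }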
@@ -203,6 +203,8 @@ workflow PROFILING {
             .filter{
                 if (it[0].is_fasta) log.warn "[nf-core/taxprofiler] MetaPhlAn3 currently does not accept FASTA files as input. Skipping MetaPhlAn3 for sample ${it[0].id}."
                 !it[0].is_fasta
+                if (it[0].instrument_platform == 'OXFORD_NANOPORE') log.warn "[nf-core/taxprofiler] MetaPhlAn3 has not been evaluated for Nanopore data. Skipping MetaPhlAn3 for sample ${it[0].id}."
+                !it[0].instrument_platform == 'OXFORD_NANOPORE'
             }
             .multiMap {
                 it ->
 
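Two Groovy details are worth keeping in mind when reading this filter. First, a filter closure keeps an element based on its last expression only, so after this change the earlier `!it[0].is_fasta` line no longer decides the outcome. Second, unary `!` binds more tightly than `==`, so `!it[0].instrument_platform == 'OXFORD_NANOPORE'` is parsed as `(!it[0].instrument_platform) == 'OXFORD_NANOPORE'`, which negates the string rather than the comparison; the conventional way to express the intended test is `it[0].instrument_platform != 'OXFORD_NANOPORE'`. A stand-alone Groovy check (hypothetical values) illustrates the precedence:

    // Stand-alone Groovy precedence check; the platform value is hypothetical.
    def platform = 'ILLUMINA'

    // Non-empty strings are truthy in Groovy, so !platform is false ...
    assert (!platform) == false
    // ... and `!platform == 'OXFORD_NANOPORE'` parses as `(!platform) == 'OXFORD_NANOPORE'`,
    // i.e. false == 'OXFORD_NANOPORE', which is false for any non-empty platform string
    assert (!platform == 'OXFORD_NANOPORE') == false
    // The comparison the filter presumably intends:
    assert (platform != 'OXFORD_NANOPORE')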
@@ -277,14 +279,13 @@
                     [[id: db_meta.db_name, single_end: meta.single_end], reads, db_meta, db]
                 }
                 .groupTuple(by: [0,2,3])
-                .dump(tag: "krakenuniq_premultimap")
                 .multiMap {
                     single_meta, reads, db_meta, db ->
                         reads: [ single_meta + db_meta, reads.flatten() ]
                         db: db
                 }
             // Hardcode to _always_ produce the report file (which is our basic output, and goes into)
-            KRAKENUNIQ_PRELOADEDKRAKENUNIQ ( ch_input_for_krakenuniq.reads.dump(tag: "krakenuniq_input"), ch_input_for_krakenuniq.db.dump(tag: "krakenuniq_db"), params.krakenuniq_ram_chunk_size, params.krakenuniq_save_reads, true, params.krakenuniq_save_readclassifications )
+            KRAKENUNIQ_PRELOADEDKRAKENUNIQ ( ch_input_for_krakenuniq.reads, ch_input_for_krakenuniq.db, params.krakenuniq_ram_chunk_size, params.krakenuniq_save_reads, true, params.krakenuniq_save_readclassifications )
             ch_multiqc_files = ch_multiqc_files.mix( KRAKENUNIQ_PRELOADEDKRAKENUNIQ.out.report )
             ch_versions = ch_versions.mix( KRAKENUNIQ_PRELOADEDKRAKENUNIQ.out.versions.first() )
             ch_raw_classifications = ch_raw_classifications.mix( KRAKENUNIQ_PRELOADEDKRAKENUNIQ.out.classified_assignment )
 
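The remaining edits drop the `.dump()` calls, which are debugging aids: `dump(tag: ...)` only prints channel contents when the pipeline is launched with `-dump-channels`, so removing them does not change behaviour. The `multiMap` that feeds KRAKENUNIQ_PRELOADEDKRAKENUNIQ forks one grouped channel into `reads` and `db` outputs that are consumed in lockstep. A small self-contained sketch with hypothetical channel contents:

    nextflow.enable.dsl = 2

    workflow {
        // Hypothetical grouped element: [ meta, nested read lists, database path ]
        Channel.of(
            [ [id: 'testdb', single_end: true], [ ['a_1.fastq.gz'], ['b_1.fastq.gz'] ], '/path/to/testdb' ]
        )
        .dump(tag: 'krakenuniq_premultimap')          // printed only with `nextflow run ... -dump-channels`
        .multiMap { single_meta, reads, db ->
            reads: [ single_meta, reads.flatten() ]   // -> [ meta, [a_1.fastq.gz, b_1.fastq.gz] ]
            db:    db                                 // -> /path/to/testdb
        }
        .set { ch_input_for_krakenuniq }

        ch_input_for_krakenuniq.reads.view()
        ch_input_for_krakenuniq.db.view()
    }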