
Update usage.md, reformat test.config and modules.config

sofstam committed 2022-04-19 13:04:58 +02:00
commit 1b7ff2265d (parent 0e5c9e7bdd)
3 changed files with 11 additions and 6 deletions

modules.config

@@ -226,7 +226,7 @@ process {
pattern: '*.txt'
]
ext.args = { "${meta.db_params}" }
-ext.prefix = { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
+ext.prefix = params.perform_runmerging ? { "${meta.id}-${meta.db_name}" } : { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
}
withName: CUSTOM_DUMPSOFTWAREVERSIONS {
@@ -252,6 +252,6 @@ process {
pattern: '*.tsv'
]
ext.args = { "${meta.db_params}" }
-ext.prefix = { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
+ext.prefix = params.perform_runmerging ? { "${meta.id}-${meta.db_name}" } : { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
}
}
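
The change above makes the profiler output prefix conditional on run merging: when `params.perform_runmerging` is enabled, reads from multiple runs have already been merged per sample, so the `run_accession` component is dropped from the output file name. A minimal sketch of the same pattern, assuming an nf-core-style `modules.config`; the `FOO_PROFILER` selector is illustrative, not a process name from the pipeline:

```nextflow
process {
    // Illustrative selector; the pipeline applies this pattern to its profiler
    // modules, as in the two blocks shown in the diff above.
    withName: FOO_PROFILER {
        ext.args   = { "${meta.db_params}" }
        // With run merging there is one output per sample and database, so the
        // run accession is omitted; without it, each run keeps its accession.
        ext.prefix = params.perform_runmerging ? { "${meta.id}-${meta.db_name}" } : { "${meta.id}-${meta.run_accession}-${meta.db_name}" }
    }
}
```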

test.config

@@ -24,14 +24,14 @@ params {
// TODO nf-core: Give any required params for the test so that command line flags are not needed
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/taxprofiler/samplesheet.csv'
databases = 'https://raw.githubusercontent.com/nf-core/test-datasets/taxprofiler/database.csv'
-run_kraken2 = true
-run_malt = true
-run_metaphlan3 = true
-run_centrifuge = true
perform_shortread_clipmerge = true
perform_longread_clip = false
perform_shortread_complexityfilter = true
perform_shortread_hostremoval = true
shortread_hostremoval_reference = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.fasta'
+run_kaiju = true
+run_kraken2 = true
+run_malt = true
+run_metaphlan3 = true
+run_centrifuge = true
}
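
The `run_*` booleans in this test profile switch individual profilers on or off at the workflow level. As a hedged sketch of how such a flag is typically consumed in Nextflow DSL2 code (the include path, subworkflow, and channel names below are illustrative, not taken from nf-core/taxprofiler):

```nextflow
// Hedged sketch only; module path, workflow and channel names are assumptions.
include { KAIJU_KAIJU } from './modules/nf-core/modules/kaiju/kaiju/main'

workflow PROFILING {
    take:
    ch_reads     // [ meta, reads ] pairs from the preprocessing steps
    ch_kaiju_db  // kaiju database directory (must contain nodes.dmp)

    main:
    // The boolean set in test.config (or on the command line) gates the profiler
    if (params.run_kaiju) {
        KAIJU_KAIJU(ch_reads, ch_kaiju_db)
    }
}
```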

usage.md

@@ -86,6 +86,11 @@ Column specifications are as follows:
> 💡 You can also specify the same database directory/file twice (ensuring unique `db_name`s) with different parameters for each entry, in order to compare the effect of those parameters during profiling.
+## Profilers
+- `kaiju`
+  - The file `nodes.dmp` must be inside the database directory that is specified in the `--databases` file
## Running the pipeline
The typical command for running the pipeline is as follows:
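
The hunk ends at the start of the "Running the pipeline" section, so the example command itself is not part of this diff. As a hedged illustration of the configuration side instead, a minimal custom config enabling the newly documented kaiju profiler might look as follows; the file paths and the exact parameter selection are assumptions, not content from the docs:

```nextflow
// Illustrative run config, supplied to Nextflow with `-c my_run.config`.
// Paths are placeholders; parameter names follow the test.config shown above.
params {
    input     = 'samplesheet.csv'  // sample sheet, columns as described in usage.md
    databases = 'database.csv'     // the kaiju entry must point at a directory containing nodes.dmp
    run_kaiju = true
}
```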