/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
nf-core/taxprofiler Nextflow config file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Default config options for all compute environments
----------------------------------------------------------------------------------------
*/
// Global default params, used in configs
params {

    // TODO nf-core: Specify your pipeline's command line flags
    // Input options
    input = null

    // References
    genome = null
    igenomes_base = 's3://ngi-igenomes/igenomes'
    igenomes_ignore = false

    // MultiQC options
    multiqc_config = null
    multiqc_title = null
    multiqc_logo = null
    max_multiqc_email_size = '25.MB'
    multiqc_methods_description = null

    // Boilerplate options
    outdir = null
    tracedir = "${params.outdir}/pipeline_info"
    publish_dir_mode = 'copy'
    email = null
    email_on_fail = null
    plaintext_email = false
    monochrome_logs = false
    hook_url = null
    help = false
    validate_params = true
    show_hidden_params = false
    schema_ignore_params = 'genomes,fasta'
    enable_conda = false

    // Config options
    custom_config_version = 'master'
    custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
    config_profile_description = null
    config_profile_contact = null
    config_profile_url = null
    config_profile_name = null

    // Max resource options
    // Defaults only, expecting to be overwritten
    max_memory = '128.GB'
    max_cpus = 16
    max_time = '240.h'

    // Databases
    databases = null

    // FASTQ preprocessing
    perform_shortread_qc = false
    shortread_qc_tool = 'fastp'
    shortread_qc_skipadaptertrim = false
    shortread_qc_mergepairs = true
    shortread_qc_excludeunmerged = false
    shortread_qc_adapter1 = null
    shortread_qc_adapter2 = null
    shortread_qc_minlength = 15
    perform_longread_qc = false
    longread_qc_skipadaptertrim = false
    longread_qc_skipqualityfilter = false
    longread_qc_qualityfilter_minlength = 1000
    longread_qc_qualityfilter_keeppercent = 90
    longread_qc_qualityfilter_targetbases = 500000000
    save_preprocessed_reads = false

    // Complexity filtering
    perform_shortread_complexityfilter = false
    shortread_complexityfilter_tool = 'bbduk'
    shortread_complexityfilter_entropy = 0.3
    shortread_complexityfilter_bbduk_windowsize = 50
    shortread_complexityfilter_bbduk_mask = false
    shortread_complexityfilter_prinseqplusplus_mode = 'entropy'
    shortread_complexityfilter_prinseqplusplus_dustscore = 0.5
    shortread_complexityfilter_fastp_threshold = 30
    save_complexityfiltered_reads = false

    // Run merging
    perform_runmerging = false
    save_runmerged_reads = false

    // Host removal
    perform_shortread_hostremoval = false
    perform_longread_hostremoval = false
    hostremoval_reference = null
    shortread_hostremoval_index = null
    longread_hostremoval_index = null
    save_hostremoval_index = false
    save_hostremoval_mapped = false
    save_hostremoval_unmapped = false

    // MALT
    run_malt = false
    malt_mode = 'BlastN'
    malt_generate_megansummary = false
    malt_save_reads = false // added via map + database args extension in profiling.nf

    // kraken2
    run_kraken2 = false
    kraken2_save_reads = false // added directly to module in profiling.nf
    kraken2_save_readclassification = false // added directly to module in profiling.nf
    kraken2_save_minimizers = false

    // Bracken
    run_bracken = true

    // centrifuge
    run_centrifuge = false
    centrifuge_save_reads = false // added directly to module in profiling.nf

    // metaphlan3
    run_metaphlan3 = false

    // kaiju
    run_kaiju = false
    kaiju_taxon_rank = 'species'

    // diamond
    run_diamond = false
    diamond_output_format = 'tsv' // TSV is the only format with taxonomic information, apparently
    diamond_save_reads = false // this will override the default DIAMOND output format so no taxonomic profile is generated! added directly to module in profiling.nf

    // mOTUs
    run_motus = false

    // krona
    run_krona = false
    krona_taxonomy_directory = null

    // Profile standardisation
    run_profile_standardisation = false
    generate_biom_output = false
}
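
// The defaults above can be overridden at runtime on the command line or through a user config
// supplied with `-c`. Illustrative example (samplesheet.csv, databases.csv and ./results are
// placeholder paths, not files shipped with the pipeline):
//   nextflow run nf-core/taxprofiler -profile docker \
//       --input samplesheet.csv --databases databases.csv --outdir ./results \
//       --perform_shortread_qc --run_kraken2
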
// Load base.config by default for all pipelines
includeConfig 'conf/base.config'
// Load nf-core custom profiles from different Institutions
try {
    includeConfig "${params.custom_config_base}/nfcore_custom.config"
} catch (Exception e) {
    System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
}
// Load nf-core/taxprofiler custom profiles from different institutions.
// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
try {
    includeConfig "${params.custom_config_base}/pipeline/taxprofiler.config"
} catch (Exception e) {
    System.err.println("WARNING: Could not load nf-core/config/taxprofiler profiles: ${params.custom_config_base}/pipeline/taxprofiler.config")
}
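
// The two includes above fetch shared institutional profiles over HTTPS. For offline runs the
// lookup can instead point at a local checkout of nf-core/configs, e.g. (illustrative path and
// profile name):
//   nextflow run nf-core/taxprofiler --custom_config_base /path/to/nf-core/configs -profile <institution>
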
profiles {
    debug { process.beforeScript = 'echo $HOSTNAME' }
    conda {
        params.enable_conda = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    mamba {
        params.enable_conda = true
        conda.useMamba = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    docker {
        docker.enabled = true
        docker.userEmulation = true
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    singularity {
        singularity.enabled = true
        singularity.autoMounts = true
        docker.enabled = false
        podman.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    podman {
        podman.enabled = true
        docker.enabled = false
        singularity.enabled = false
        shifter.enabled = false
        charliecloud.enabled = false
    }
    shifter {
        shifter.enabled = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        charliecloud.enabled = false
    }
    charliecloud {
        charliecloud.enabled = true
        docker.enabled = false
        singularity.enabled = false
        podman.enabled = false
        shifter.enabled = false
    }
    gitpod {
        executor.name = 'local'
        executor.cpus = 16
        executor.memory = 60.GB
    }
    test                 { includeConfig 'conf/test.config' }
    test_full            { includeConfig 'conf/test_full.config' }
    test_noprofiling     { includeConfig 'conf/test_noprofiling.config' }
    test_nopreprocessing { includeConfig 'conf/test_nopreprocessing.config' }
    test_nothing         { includeConfig 'conf/test_nothing.config' }
    test_motus           { includeConfig 'conf/test_motus.config' }
    test_pep             { includeConfig 'conf/test_pep.config' }
}
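
// Profiles are selected with `-profile` and can be combined as a comma-separated list, with
// later entries taking precedence. Illustrative example combining the bundled test data with a
// container engine:
//   nextflow run nf-core/taxprofiler -profile test,docker --outdir ./results
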
// Load igenomes.config if required
if (!params.igenomes_ignore) {
    includeConfig 'conf/igenomes.config'
} else {
    params.genomes = [:]
}
// Export these variables to prevent local Python/R libraries from conflicting with those in the container
// The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container.
// See https://apeltzer.github.io/post/03-julia-lang-nextflow/ for details on that. Once we have a common agreement on where to keep Julia packages, this is adjustable.
env {
    PYTHONNOUSERSITE = '1'
    R_PROFILE_USER = "/.Rprofile"
    R_ENVIRON_USER = "/.Renviron"
    JULIA_DEPOT_PATH = "/usr/local/share/julia"
}
// Capture exit codes from upstream processes when piping
process.shell = ['/bin/bash', '-euo', 'pipefail']
def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
timeline {
    enabled = true
    file    = "${params.tracedir}/execution_timeline_${trace_timestamp}.html"
}
report {
    enabled = true
    file    = "${params.tracedir}/execution_report_${trace_timestamp}.html"
}
trace {
    enabled = true
    file    = "${params.tracedir}/execution_trace_${trace_timestamp}.txt"
}
dag {
    enabled = true
    file    = "${params.tracedir}/pipeline_dag_${trace_timestamp}.html"
}
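
// All four reports are written under params.tracedir (default: "${params.outdir}/pipeline_info"),
// so they can be relocated together by overriding that single parameter, e.g. passing
// `--tracedir ./logs` on the command line (illustrative value).
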
manifest {
    name            = 'nf-core/taxprofiler'
    author          = 'nf-core community'
    homePage        = 'https://github.com/nf-core/taxprofiler'
    description     = 'Taxonomic profiling of shotgun metagenomic data'
    mainScript      = 'main.nf'
    nextflowVersion = '!>=21.10.3'
    version         = '1.0dev'
    doi             = ''
}
// Load modules.config for DSL2 module specific options
includeConfig 'conf/modules.config'
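
// conf/modules.config typically attaches per-module options through process selectors and
// `ext.args`; a minimal sketch of that pattern (the module name and argument shown are
// illustrative, not taken from this pipeline's modules.config):
//   process {
//       withName: FASTQC {
//           ext.args = '--quiet'
//       }
//   }
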
// Function to ensure that resource requirements don't go beyond
// a maximum limit
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min( obj, params.max_cpus as int )
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}
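
// check_max() is usually called from the process resource definitions in conf/base.config; a
// minimal sketch of that pattern (the label, base values, and retry multiplier are illustrative):
//   process {
//       withLabel: process_medium {
//           cpus   = { check_max( 6     * task.attempt, 'cpus'   ) }
//           memory = { check_max( 36.GB * task.attempt, 'memory' ) }
//           time   = { check_max( 8.h   * task.attempt, 'time'   ) }
//       }
//   }
// On retries, task.attempt grows the request, and check_max() caps it at params.max_cpus,
// params.max_memory, and params.max_time respectively.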