mirror of
https://github.com/MillironX/nf-configs.git
synced 2024-11-22 00:26:03 +00:00
Merge pull request #279 from pierrespc/master
Added Configuration files for running in Maestro HPC at Pasteur Institute, Paris
This commit is contained in:
commit
73b6b74c69
8 changed files with 218 additions and 0 deletions
1
.github/workflows/main.yml
vendored
1
.github/workflows/main.yml
vendored
|
@ -49,6 +49,7 @@ jobs:
|
||||||
- 'imperial_mb'
|
- 'imperial_mb'
|
||||||
- 'jax'
|
- 'jax'
|
||||||
- 'lugh'
|
- 'lugh'
|
||||||
|
- 'maestro'
|
||||||
- 'mpcdf'
|
- 'mpcdf'
|
||||||
- 'munin'
|
- 'munin'
|
||||||
- 'nu_genomics'
|
- 'nu_genomics'
|
||||||
|
|
|
@ -118,6 +118,7 @@ Currently documentation is available for the following systems:
|
||||||
* [ICR_DAVROS](docs/icr_davros.md)
|
* [ICR_DAVROS](docs/icr_davros.md)
|
||||||
* [JAX](docs/jax.md)
|
* [JAX](docs/jax.md)
|
||||||
* [LUGH](docs/lugh.md)
|
* [LUGH](docs/lugh.md)
|
||||||
|
* [MAESTRO](docs/maestro.md)
|
||||||
* [MPCDF](docs/mpcdf.md)
|
* [MPCDF](docs/mpcdf.md)
|
||||||
* [MUNIN](docs/munin.md)
|
* [MUNIN](docs/munin.md)
|
||||||
* [NU_GENOMICS](docs/nu_genomics.md)
|
* [NU_GENOMICS](docs/nu_genomics.md)
|
||||||
|
|
49
conf/maestro.config
Normal file
49
conf/maestro.config
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
params {
|
||||||
|
config_profile_description = 'Institut Pasteur Maestro cluster profile'
|
||||||
|
config_profile_url = 'https://research.pasteur.fr/en/equipment/maestro-compute-cluster/'
|
||||||
|
config_profile_contact = 'Pierre Luisi (@pierrespc)'
|
||||||
|
}
|
||||||
|
|
||||||
|
singularity {
|
||||||
|
enabled = true
|
||||||
|
autoMounts = true
|
||||||
|
runOptions = '--home $HOME:/home/$USER --bind /pasteur'
|
||||||
|
}
|
||||||
|
|
||||||
|
profiles {
|
||||||
|
|
||||||
|
normal {
|
||||||
|
process {
|
||||||
|
executor = 'slurm'
|
||||||
|
scratch = false
|
||||||
|
queue = 'common'
|
||||||
|
clusterOptions = '--qos=normal'
|
||||||
|
}
|
||||||
|
|
||||||
|
params {
|
||||||
|
igenomes_ignore = true
|
||||||
|
igenomesIgnore = true
|
||||||
|
max_memory = 400.GB
|
||||||
|
max_cpus = 96
|
||||||
|
max_time = 24.h
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
long {
|
||||||
|
process {
|
||||||
|
executor = 'slurm'
|
||||||
|
scratch = false
|
||||||
|
queue = 'common'
|
||||||
|
clusterOptions = '--qos=long'
|
||||||
|
}
|
||||||
|
|
||||||
|
params {
|
||||||
|
igenomes_ignore = true
|
||||||
|
igenomesIgnore = true
|
||||||
|
max_memory = 400.GB
|
||||||
|
max_cpus = 5
|
||||||
|
max_time = 8760.h
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
116
conf/pipeline/eager/maestro.config
Normal file
116
conf/pipeline/eager/maestro.config
Normal file
|
@ -0,0 +1,116 @@
|
||||||
|
/*
|
||||||
|
* -------------------------------------------------
|
||||||
|
* Nextflow config file for running nf-core eager on whole genome data or mitogenomes
|
||||||
|
* -------------------------------------------------
|
||||||
|
* nextflow run nf-core/eager -profile maestro,<qos>,<genome> (where <qos> is long or normal and <genome> is nuclear, mitocondrial or unlimitedtime)
|
||||||
|
*/
|
||||||
|
|
||||||
|
params {
|
||||||
|
|
||||||
|
config_profile_name = 'nf-core/eager nuclear/mitocondrial - human profiles'
|
||||||
|
|
||||||
|
config_profile_description = "Simple profiles for assessing computational resources that fit human nuclear DNA or human mitogenome processing. unlimitedtime is also available"
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
profiles {
|
||||||
|
|
||||||
|
nuclear {
|
||||||
|
process {
|
||||||
|
errorStrategy = 'retry'
|
||||||
|
maxRetries = 2
|
||||||
|
|
||||||
|
withName:'makeBWAIndex'{
|
||||||
|
cpus = { check_max( 8 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 12.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'adapter_removal'{
|
||||||
|
cpus = { check_max( 8 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 16.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 12.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'bwa'{
|
||||||
|
cpus = { check_max( 40 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 40.GB * task.attempt, 'memory' ) }
|
||||||
|
time = 24.h
|
||||||
|
cache = 'deep'
|
||||||
|
}
|
||||||
|
withName:'markduplicates'{
|
||||||
|
errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' }
|
||||||
|
cpus = { check_max( 16 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 16.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 12.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'damageprofiler'{
|
||||||
|
cpus = 1
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 6.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'fastp'{
|
||||||
|
cpus = 8
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 6.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'fastqc'{
|
||||||
|
cpus = 2
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 6.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mitocondrial {
|
||||||
|
process {
|
||||||
|
errorStrategy = 'retry'
|
||||||
|
maxRetries = 2
|
||||||
|
|
||||||
|
withName:'makeBWAIndex'{
|
||||||
|
cpus = { check_max( 8 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 12.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'adapter_removal'{
|
||||||
|
cpus = { check_max( 8 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 16.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 12.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'bwa'{
|
||||||
|
cpus = { check_max( 5 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 5.GB * task.attempt, 'memory' ) }
|
||||||
|
time = 24.h
|
||||||
|
}
|
||||||
|
withName:'markduplicates'{
|
||||||
|
errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' }
|
||||||
|
cpus = { check_max( 5 * task.attempt, 'cpus' ) }
|
||||||
|
memory = { check_max( 5.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 6.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'damageprofiler'{
|
||||||
|
cpus = 1
|
||||||
|
memory = { check_max( 5.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 3.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'fastp'{
|
||||||
|
cpus = 8
|
||||||
|
memory = { check_max( 5.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 3.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
withName:'fastqc'{
|
||||||
|
cpus = 2
|
||||||
|
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
|
||||||
|
time = { check_max( 6.h * task.attempt, 'time' ) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
unlimitedtime {
|
||||||
|
process {
|
||||||
|
errorStrategy = 'finish'
|
||||||
|
|
||||||
|
cpus = 5
|
||||||
|
memory = 200.GB
|
||||||
|
time = 8760.h
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
19
docs/maestro.md
Normal file
19
docs/maestro.md
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
# nf-core/configs Maestro (at Pasteur Institute, Paris) Configuration
|
||||||
|
|
||||||
|
To use, run the pipeline with `-profile maestro,<qos>` (with qos being long or normal). This will download and launch the maestro.config which has been pre-configured with a setup suitable for the Maestro cluster on either the long or normal qos.
|
||||||
|
Using one of these profiles, a docker image containing all of the required software will be downloaded, and converted to a Singularity image before execution of the pipeline
|
||||||
|
|
||||||
|
## Required modules
|
||||||
|
|
||||||
|
Please first load java, nextflow and singularity modules
|
||||||
|
`module load java`
|
||||||
|
`module load nextflow`
|
||||||
|
`module load singularity`
|
||||||
|
|
||||||
|
Also, do not forget to run nextflow inside tmux or a similar terminal multiplexer.
|
||||||
|
|
||||||
|
## Other profiles at Pasteur
|
||||||
|
|
||||||
|
If you are using TARS cluster, please refer to pasteur profile.
|
||||||
|
|
||||||
|
Please refer to docs/pasteur.md for installing and running nf-core instructions.
|
30
docs/pipeline/eager/maestro.md
Normal file
30
docs/pipeline/eager/maestro.md
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# nf-core/configs maestro eager specific configuration
|
||||||
|
|
||||||
|
Extra specific configuration for eager pipeline for human DNA data processing
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
To use, run the pipeline with `-profile maestro,<qos>,<type>`, where qos can be normal or long and type can be nuclear or mitocondrial (matching the profile names defined in the config)
|
||||||
|
|
||||||
|
This will download and launch the eager specific [`maestro.config`](../../../conf/pipeline/eager/maestro.config) which has been pre-configured with a setup suitable for the Maestro cluster.
|
||||||
|
|
||||||
|
Example: `nextflow run nf-core/eager -profile maestro,normal,nuclear`
|
||||||
|
|
||||||
|
## eager specific configurations for maestro
|
||||||
|
|
||||||
|
Specific configurations for maestro have been made for eager.
|
||||||
|
|
||||||
|
We decided not to provide any tool parameters here, and focus the profile only on resource management: Maestro profiles run with default nf-core/eager parameters, but with modifications concerning time (limited to 24h in normal qos, so increasing the memory and CPUs, especially for alignments).
|
||||||
|
|
||||||
|
## nuclear
|
||||||
|
|
||||||
|
Increases the number of CPUs and the amount of memory for key processes
|
||||||
|
|
||||||
|
## mitocondrial
|
||||||
|
|
||||||
|
More limited computational resources
|
||||||
|
|
||||||
|
## unlimitedtime
|
||||||
|
|
||||||
|
Every process has one year time limit. To be used only when some processes can not be completed for time reasons when using mitochondrial or nuclear profiles.
|
||||||
|
Expect slow processes when using this profile because only 5 CPUs are available at a time.
|
|
@ -43,6 +43,7 @@ profiles {
|
||||||
imperial_mb { includeConfig "${params.custom_config_base}/conf/imperial_mb.config" }
|
imperial_mb { includeConfig "${params.custom_config_base}/conf/imperial_mb.config" }
|
||||||
jax { includeConfig "${params.custom_config_base}/conf/jax.config" }
|
jax { includeConfig "${params.custom_config_base}/conf/jax.config" }
|
||||||
lugh { includeConfig "${params.custom_config_base}/conf/lugh.config" }
|
lugh { includeConfig "${params.custom_config_base}/conf/lugh.config" }
|
||||||
|
maestro { includeConfig "${params.custom_config_base}/conf/maestro.config" }
|
||||||
mpcdf { includeConfig "${params.custom_config_base}/conf/mpcdf.config" }
|
mpcdf { includeConfig "${params.custom_config_base}/conf/mpcdf.config" }
|
||||||
munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
|
munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
|
||||||
nu_genomics { includeConfig "${params.custom_config_base}/conf/nu_genomics.config" }
|
nu_genomics { includeConfig "${params.custom_config_base}/conf/nu_genomics.config" }
|
||||||
|
|
|
@ -11,4 +11,5 @@
|
||||||
profiles {
|
profiles {
|
||||||
mpcdf { includeConfig "${params.custom_config_base}/conf/pipeline/eager/mpcdf.config" }
|
mpcdf { includeConfig "${params.custom_config_base}/conf/pipeline/eager/mpcdf.config" }
|
||||||
eva { includeConfig "${params.custom_config_base}/conf/pipeline/eager/eva.config" }
|
eva { includeConfig "${params.custom_config_base}/conf/pipeline/eager/eva.config" }
|
||||||
|
maestro { includeConfig "${params.custom_config_base}/conf/pipeline/eager/maestro.config" }
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue