Mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-22 00:26:03 +00:00)
Merge pull request #376 from nvnieuwk/vsc_ugent

Added the VSC UGent profile

Commit 817f1a4aa7: 5 changed files with 155 additions and 0 deletions
.github/workflows/main.yml (vendored): 1 addition

```diff
@@ -86,6 +86,7 @@ jobs:
           - "utd_sysbio"
           - "uzh"
           - "vai"
+          - "vsc_ugent"
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
```
README.md: 1 addition

```diff
@@ -139,6 +139,7 @@ Currently documentation is available for the following systems:
 - [UTD_SYSBIO](docs/utd_sysbio.md)
 - [UZH](docs/uzh.md)
 - [VAI](docs/vai.md)
+- [VSC_UGENT](docs/vsc_ugent.md)

 ### Uploading to `nf-core/configs`
```
conf/vsc_ugent.config: new file, 115 lines

```groovy
// Specify the work directory
workDir = "$VSC_SCRATCH_VO_USER/work"

// Perform work directory cleanup when the run has successfully completed
cleanup = true

// Reduce the job submission rate to about 10 per second so the scheduler won't be bombarded with jobs
executor {
    submitRateLimit = '10 sec'
}

// Specify that singularity should be used and where the cache dir will be for the images
singularity {
    enabled = true
    autoMounts = true
    cacheDir = "$VSC_SCRATCH_VO_USER/singularity"
}

// Define profiles for each cluster
profiles {
    skitty {
        params {
            config_profile_description = 'HPC_SKITTY profile for use on the Skitty cluster of the VSC HPC.'
            config_profile_contact = 'Nicolas Vannieuwkerke (@nvnieuwk)'
            config_profile_url = 'https://www.ugent.be/hpc/en'
            max_memory = 177.GB
            max_cpus = 36
            max_time = 72.h
        }

        process {
            executor = 'slurm'
            queue = 'skitty'
            maxRetries = 2
            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
            scratch = "$VSC_SCRATCH_VO_USER"
        }
    }

    swalot {
        params {
            config_profile_description = 'HPC_SWALOT profile for use on the Swalot cluster of the VSC HPC.'
            config_profile_contact = 'Nicolas Vannieuwkerke (@nvnieuwk)'
            config_profile_url = 'https://www.ugent.be/hpc/en'
            max_memory = 116.GB
            max_cpus = 20
            max_time = 72.h
        }

        process {
            executor = 'slurm'
            queue = 'swalot'
            maxRetries = 2
            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
            scratch = "$VSC_SCRATCH_VO_USER"
        }
    }

    victini {
        params {
            config_profile_description = 'HPC_VICTINI profile for use on the Victini cluster of the VSC HPC.'
            config_profile_contact = 'Nicolas Vannieuwkerke (@nvnieuwk)'
            config_profile_url = 'https://www.ugent.be/hpc/en'
            max_memory = 88.GB
            max_cpus = 36
            max_time = 72.h
        }

        process {
            executor = 'slurm'
            queue = 'victini'
            maxRetries = 2
            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
            scratch = "$VSC_SCRATCH_VO_USER"
        }
    }

    kirlia {
        params {
            config_profile_description = 'HPC_KIRLIA profile for use on the Kirlia cluster of the VSC HPC.'
            config_profile_contact = 'Nicolas Vannieuwkerke (@nvnieuwk)'
            config_profile_url = 'https://www.ugent.be/hpc/en'
            max_memory = 738.GB
            max_cpus = 36
            max_time = 72.h
        }

        process {
            executor = 'slurm'
            queue = 'kirlia'
            maxRetries = 2
            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
            scratch = "$VSC_SCRATCH_VO_USER"
        }
    }

    doduo {
        params {
            config_profile_description = 'HPC_DODUO profile for use on the Doduo cluster of the VSC HPC.'
            config_profile_contact = 'Nicolas Vannieuwkerke (@nvnieuwk)'
            config_profile_url = 'https://www.ugent.be/hpc/en'
            max_memory = 250.GB
            max_cpus = 96
            max_time = 72.h
        }

        process {
            executor = 'slurm'
            queue = 'doduo'
            maxRetries = 2
            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
            scratch = "$VSC_SCRATCH_VO_USER"
        }
    }
}
```
docs/vsc_ugent.md: new file, 37 lines

# nf-core/configs: University of Ghent High Performance Computing Infrastructure (VSC)
> **NB:** You will need an [account](https://www.ugent.be/hpc/en/access/faq/access) to use the HPC cluster to run the pipeline.

First you should go to the cluster you want to run the pipeline on. You can check which clusters have the most free resources on this [link](https://shieldon.ugent.be:8083/pbsmon-web-users/). Use the following commands to switch between clusters:

```shell
module purge
module swap cluster/<CLUSTER>
```
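For instance, switching to the `doduo` cluster (any of the five clusters covered by this profile works the same way) and checking the result might look like this:

```shell
module swap cluster/doduo   # example: pick the doduo cluster
module list                 # verify which cluster module is now active
```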
Before running the pipeline you will need to create a PBS script to submit as a job:

```bash
#!/bin/bash

module load Nextflow

nextflow run <pipeline> -profile vsc_ugent,<CLUSTER> <Add your other parameters>
```

I also highly recommend specifying the location of a Singularity cache directory, either by setting the `$SINGULARITY_CACHEDIR` bash environment variable in your `.bash_profile` or `.bashrc`, or by adding it to your PBS script. If this cache directory is not specified, the profile's default of `$VSC_SCRATCH_VO_USER/singularity` (set in the config) is used.
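For example, a minimal sketch of setting the cache directory in your `.bashrc` (the path shown is the same one this profile already uses and is only a suggestion):

```shell
# Cache Singularity images across runs instead of re-pulling them
export SINGULARITY_CACHEDIR="$VSC_SCRATCH_VO_USER/singularity"
```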
All of the intermediate files required to run the pipeline are stored in the `work/` directory. It is recommended to delete this directory after the pipeline has finished successfully, because it can get quite large; all of the main output files are saved in the `results/` directory anyway. The config contains a `cleanup` option that removes the `work/` directory automatically once the pipeline completes successfully. If the run does not complete successfully, the `work/` directory should be removed manually to save storage space. The default work directory is set to `$VSC_SCRATCH_VO_USER/work` by this configuration.
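After a failed run, a manual cleanup could look like the sketch below (double-check the path before deleting anything):

```shell
# Remove intermediate files from a failed run; this path matches the workDir in this config
rm -rf "$VSC_SCRATCH_VO_USER/work"
```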
You can also add several TORQUE options to the PBS script, as in the sketch that follows. More about this on this [link](http://hpcugent.github.io/vsc_user_docs/pdf/intro-HPC-linux-gent.pdf#appendix.B).
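For example, a hypothetical job script with a name, walltime and resource request added (the `#PBS` directives are standard TORQUE options; the values are placeholders to adjust for your run):

```bash
#!/bin/bash
#PBS -N nf-core-pipeline        # job name (example)
#PBS -l walltime=72:00:00       # maximum run time (example)
#PBS -l nodes=1:ppn=4           # one node, four cores (example)

module load Nextflow

nextflow run <pipeline> -profile vsc_ugent,<CLUSTER> <Add your other parameters>
```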
Submit your job to the cluster with the following command:

```shell
qsub <script name>.pbs
```

> **NB:** The profile only works for the clusters `skitty`, `swalot`, `victini`, `kirlia` and `doduo`.

> **NB:** By default, the `work/` directory and the `singularity/` image cache directory are located in `$VSC_SCRATCH_VO_USER`.
nfcore_custom.config: 1 addition

```diff
@@ -68,4 +68,5 @@ profiles {
     utd_sysbio { includeConfig "${params.custom_config_base}/conf/utd_sysbio.config" }
     uzh { includeConfig "${params.custom_config_base}/conf/uzh.config" }
     vai { includeConfig "${params.custom_config_base}/conf/vai.config" }
+    vsc_ugent { includeConfig "${params.custom_config_base}/conf/vsc_ugent.config" }
 }
```