
Merge pull request #200 from nf-core/ceh-config

Seg_globe config
James A. Fellows Yates 2021-01-21 13:24:26 +01:00 committed by GitHub
commit 5ba1b01c0c
5 changed files with 51 additions and 1 deletion

.github/workflows/main.yml

@@ -16,7 +16,7 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'imperial', 'imperial_mb', 'kraken', 'mpcdf', 'munin', 'pasteur', 'phoenix', 'prince', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
+        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'imperial', 'imperial_mb', 'kraken', 'mpcdf', 'munin', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow

README.md

@@ -119,6 +119,7 @@ Currently documentation is available for the following systems:
 * [PASTEUR](docs/pasteur.md)
 * [PHOENIX](docs/phoenix.md)
 * [PRINCE](docs/prince.md)
+* [SEG_GLOBE](docs/seg_globe.md)
 * [SHH](docs/shh.md)
 * [UCT_HPC](docs/uct_hpc.md)
 * [UPPMAX](docs/uppmax.md)

conf/seg_globe.config (new file, 27 lines)

@@ -0,0 +1,27 @@
// Profile config names for nf-core/configs
params {
  config_profile_description = 'Section for Evolutionary Genomics @ GLOBE, University of Copenhagen - seg_globe profile provided by nf-core/configs.'
  config_profile_contact = 'Aashild Vaagene (@ashildv)'
  config_profile_url = 'https://globe.ku.dk/research/evogenomics/'
  max_memory = 250.GB
  max_cpus = 35
  max_time = 720.h
}

singularity {
  enabled = true
  autoMounts = true
  cacheDir = '/shared/volume/hologenomics/data/cache/nf-eager/singularity'
}

process {
  executor = 'slurm'
  queue = { task.time < 24.h ? 'hologenomics-short' : task.time < 168.h ? 'hologenomics' : 'hologenomics-long' }
}

cleanup = true

executor {
  queueSize = 8
}

docs/seg_globe.md (new file, 21 lines)

@@ -0,0 +1,21 @@
# nf-core/configs: Section for Evolutionary Genomics at GLOBE, University of Copenhagen (hologenomics partition on HPC) Configuration

> **NB:** You will need an account to use the HPC cluster to run the pipeline. If in doubt contact IT.

The profile is configured to run with Singularity version 3.6.3-1.el7, which is part of the OS installation and does not need to be loaded as a module.

Before running the pipeline you will need to load Java, miniconda and Nextflow. You can do this by including the commands below in your SLURM/sbatch script:

```bash
## Load Java and Nextflow environment modules
module purge
module load lib
module load java/v1.8.0_202-jdk miniconda nextflow/v20.07.1.5412
```

All of the intermediate files required to run the pipeline will be stored in the `work/` directory. It is recommended to delete this directory after the pipeline has finished successfully, because it can get quite large and all of the main output files will be saved in the `results/` directory anyway.

The config contains a `cleanup` command that removes the `work/` directory automatically once the pipeline has completed successfully. If the run does not complete successfully, the `work/` directory should be removed manually to save storage space.

This configuration will automatically choose the correct SLURM queue (`hologenomics-short`, `hologenomics` or `hologenomics-long`) depending on the time required by each process.

> **NB:** Nextflow needs to submit jobs via SLURM to the HPC cluster, so the commands above must be run from one of the login nodes. A sketch of a full submission script is shown below.
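
Putting the pieces together, a launch script along these lines can be submitted with `sbatch` from a login node. This is a minimal sketch, not a supported template: the pipeline name (`nf-core/eager`), the `--input` argument, the job name and the head job's resource requests are hypothetical placeholders to be replaced with your own values.

```bash
#!/bin/bash
#SBATCH --job-name=nf-core-run        # hypothetical job name
#SBATCH --partition=hologenomics      # partition for the Nextflow head job (assumption)
#SBATCH --time=72:00:00               # keep the head job alive for the whole run
#SBATCH --mem=4G                      # the head job itself needs little memory

## Load Java, miniconda and Nextflow as described above
module purge
module load lib
module load java/v1.8.0_202-jdk miniconda nextflow/v20.07.1.5412

## Launch with the seg_globe profile; Nextflow then submits each process
## as its own SLURM job on the queues set in conf/seg_globe.config.
nextflow run nf-core/eager -profile seg_globe --input samplesheet.tsv
```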

nfcore_custom.config

@@ -38,6 +38,7 @@ profiles {
   pasteur { includeConfig "${params.custom_config_base}/conf/pasteur.config" }
   phoenix { includeConfig "${params.custom_config_base}/conf/phoenix.config" }
   prince { includeConfig "${params.custom_config_base}/conf/prince.config" }
+  seg_globe { includeConfig "${params.custom_config_base}/conf/seg_globe.config"}
   shh { includeConfig "${params.custom_config_base}/conf/shh.config" }
   uct_hpc { includeConfig "${params.custom_config_base}/conf/uct_hpc.config" }
   uppmax { includeConfig "${params.custom_config_base}/conf/uppmax.config" }