diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 50dc704..38adb48 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -16,7 +16,7 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'kraken', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
+        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'biohpc_gen', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'kraken', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
diff --git a/README.md b/README.md
index 5616780..4217d62 100644
--- a/README.md
+++ b/README.md
@@ -99,6 +99,7 @@ Currently documentation is available for the following systems:
 * [BIGPURPLE](docs/bigpurple.md)
 * [BI](docs/bi.md)
 * [BINAC](docs/binac.md)
+* [BIOHPC_GEN](docs/biohpc_gen.md)
 * [CBE](docs/cbe.md)
 * [CCGA_DX](docs/ccga_dx.md)
 * [CCGA_MED](docs/ccga_med.md)
diff --git a/conf/biohpc_gen.config b/conf/biohpc_gen.config
new file mode 100755
index 0000000..b625b20
--- /dev/null
+++ b/conf/biohpc_gen.config
@@ -0,0 +1,27 @@
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'BioHPC Genomics (biohpc_gen) cluster profile provided by nf-core/configs'
+  config_profile_contact = 'Patrick Hüther (@phue)'
+  config_profile_url = 'https://collab.lmu.de/display/BioHPCGenomics/BioHPC+Genomics'
+}
+
+env {
+  SLURM_CLUSTERS='biohpc_gen'
+}
+
+process {
+  executor = 'slurm'
+  queue = { task.memory <= 1536.GB ? (task.time > 2.d || task.memory > 384.GB ? 'biohpc_gen_production' : 'biohpc_gen_normal') : 'biohpc_gen_highmem' }
+  beforeScript = 'module use /dss/dssfs01/pr53da/pr53da-dss-0000/spack/modules/x86_avx2/linux*'
+  module = 'charliecloud/0.22:miniconda3'
+}
+
+charliecloud {
+  enabled = true
+}
+
+params {
+  max_time = 14.d
+  max_cpus = 80
+  max_memory = 3.TB
+}
diff --git a/docs/biohpc_gen.md b/docs/biohpc_gen.md
new file mode 100644
index 0000000..1078835
--- /dev/null
+++ b/docs/biohpc_gen.md
@@ -0,0 +1,17 @@
+# nf-core/configs: BioHPC Genomics (BIOHPC_GEN) Configuration
+
+All nf-core pipelines have been successfully configured for use on the BioHPC Genomics (biohpc_gen) cluster that is housed at the Leibniz Rechenzentrum (LRZ) for research groups at the Faculty of Biology of the Ludwig-Maximilians-University (LMU) in Munich.
+
+To use, run the pipeline with `-profile biohpc_gen`. This will download and launch the [`biohpc_gen.config`](../conf/biohpc_gen.config) which has been pre-configured with a setup suitable for the biohpc_gen cluster. Using this profile, a Docker image containing all of the required software will be downloaded and converted to a Charliecloud container before execution of the pipeline.
+
+Before running the pipeline you will need to load Nextflow and Charliecloud using the environment module system on biohpc_gen. You can do this by issuing the commands below:
+
+```bash
+## Load Nextflow and Charliecloud environment modules
+module purge
+module load nextflow charliecloud/0.22
+```
+
+>NB: Charliecloud support requires Nextflow version `21.03.0-edge` or later.
+>NB: You will need an account to use the LRZ Linux cluster as well as group access to the biohpc_gen cluster in order to run nf-core pipelines.
+>NB: Nextflow will need to submit the jobs via the job scheduler to the HPC cluster and as such the commands above will have to be executed on one of the login nodes.
diff --git a/nfcore_custom.config b/nfcore_custom.config
index eb287b9..6b51561 100644
--- a/nfcore_custom.config
+++ b/nfcore_custom.config
@@ -15,6 +15,7 @@ profiles {
   bi { includeConfig "${params.custom_config_base}/conf/bi.config" }
   bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
   binac { includeConfig "${params.custom_config_base}/conf/binac.config" }
+  biohpc_gen { includeConfig "${params.custom_config_base}/conf/biohpc_gen.config" }
   cbe { includeConfig "${params.custom_config_base}/conf/cbe.config" }
   ccga_dx { includeConfig "${params.custom_config_base}/conf/ccga_dx.config" }
   ccga_med { includeConfig "${params.custom_config_base}/conf/ccga_med.config" }
@@ -23,7 +24,7 @@ profiles {
   crick { includeConfig "${params.custom_config_base}/conf/crick.config" }
   czbiohub_aws { includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config" }
   ebc { includeConfig "${params.custom_config_base}/conf/ebc.config" }
-  eddie { includeConfig "${params.custom_config_base}/conf/eddie.config" }
+  eddie { includeConfig "${params.custom_config_base}/conf/eddie.config" }
   icr_davros { includeConfig "${params.custom_config_base}/conf/icr_davros.config" }
   ifb_core { includeConfig "${params.custom_config_base}/conf/ifb_core.config" }
   imperial { includeConfig "${params.custom_config_base}/conf/imperial.config" }
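For reference, a typical session on biohpc_gen would look like the sketch below once the changes above are merged. The pipeline name, samplesheet and output directory are placeholders chosen for illustration and are not part of this change:

```bash
## On a biohpc_gen login node: load the required environment modules
module purge
module load nextflow charliecloud/0.22

## Launch any nf-core pipeline with the new profile; Nextflow fetches
## biohpc_gen.config from nf-core/configs, submits tasks to SLURM and
## runs each of them inside a Charliecloud container
nextflow run nf-core/rnaseq \
  -profile biohpc_gen \
  --input samplesheet.csv \
  --outdir results
```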