Mirror of https://github.com/MillironX/nf-configs.git, synced 2024-11-21 16:16:04 +00:00
Merge branch 'master' into eva
Commit 161c3a33d4
7 changed files with 73 additions and 3 deletions
.github/workflows/main.yml (vendored): 2 changes
@@ -16,7 +16,7 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'eva', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'kraken', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
+        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'biohpc_gen', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'eva', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'kraken', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
@@ -99,6 +99,7 @@ Currently documentation is available for the following systems:
 * [BIGPURPLE](docs/bigpurple.md)
 * [BI](docs/bi.md)
 * [BINAC](docs/binac.md)
+* [BIOHPC_GEN](docs/biohpc_gen.md)
 * [CBE](docs/cbe.md)
 * [CCGA_DX](docs/ccga_dx.md)
 * [CCGA_MED](docs/ccga_med.md)
conf/biohpc_gen.config (new executable file): 27 additions
@@ -0,0 +1,27 @@
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'BioHPC Genomics (biohpc_gen) cluster profile provided by nf-core/configs'
+  config_profile_contact = 'Patrick Hüther (@phue)'
+  config_profile_url = 'https://collab.lmu.de/display/BioHPCGenomics/BioHPC+Genomics'
+}
+
+env {
+  SLURM_CLUSTERS='biohpc_gen'
+}
+
+process {
+  executor = 'slurm'
+  queue = { task.memory <= 1536.GB ? (task.time > 2.d || task.memory > 384.GB ? 'biohpc_gen_production' : 'biohpc_gen_normal') : 'biohpc_gen_highmem' }
+  beforeScript = 'module use /dss/dssfs01/pr53da/pr53da-dss-0000/spack/modules/x86_avx2/linux*'
+  module = 'charliecloud/0.22:miniconda3'
+}
+
+charliecloud {
+  enabled = true
+}
+
+params {
+  params.max_time = 14.d
+  params.max_cpus = 80
+  params.max_memory = 3.TB
+}
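With this profile in place, a run selects it via `-profile biohpc_gen`. As a minimal launch sketch (the pipeline name is a placeholder; the module names follow the documentation added later in this commit):

```bash
## Hypothetical example: launch an nf-core pipeline with the new profile from a biohpc_gen login node
module purge
module load nextflow charliecloud/0.22
nextflow run nf-core/<pipeline> -profile biohpc_gen
```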
@@ -24,4 +24,28 @@ process {
   withName:GatherBQSRReports {
     clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
   }
+  withName:HaplotypeCaller {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:GenotypeGVCFs {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:Mutect2 {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:MergeMutect2Stats {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:PileupSummariesForMutect2 {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:MergePileupSummaries {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:CalculateContamination {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
+  withName:FilterMutect2Calls {
+    clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+  }
 }
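A note on the repeated `clusterOptions` expression: `h_vmem` is typically enforced per slot on SGE-style schedulers, which is presumably why the total request (task memory plus a 4 GB safety margin) is divided by `task.cpus`. As a worked example, a task requesting 16.GB and 4 cpus would be submitted with `-l h_vmem=5368709120`, i.e. (16 + 4) GiB = 21474836480 bytes divided by 4.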
docs/biohpc_gen.md (new file): 17 additions
@@ -0,0 +1,17 @@
+# nf-core/configs: BioHPC Genomics (BIOHPC_GEN) Configuration
+
+All nf-core pipelines have been successfully configured for use on the BioHPC Genomics (biohpc_gen) cluster that is housed at the Leibniz Rechenzentrum (LRZ) for research groups at the Faculty of Biology of the Ludwig-Maximilians-University (LMU) in Munich.
+
+To use, run the pipeline with `-profile biohpc_gen`. This will download and launch the [`biohpc_gen.config`](../conf/biohpc_gen.config) which has been pre-configured with a setup suitable for the biohpc_gen cluster. Using this profile, a docker image containing all of the required software will be downloaded, and converted to a Charliecloud container before execution of the pipeline.
+
+Before running the pipeline you will need to load Nextflow and Charliecloud using the environment module system on biohpc_gen. You can do this by issuing the commands below:
+
+```bash
+## Load Nextflow and Charliecloud environment modules
+module purge
+module load nextflow charliecloud/0.22
+```
+
+>NB: Charliecloud support requires Nextflow version `21.03.0-edge` or later.
+>NB: You will need an account to use the LRZ Linux cluster as well as group access to the biohpc_gen cluster in order to run nf-core pipelines.
+>NB: Nextflow will need to submit the jobs via the job scheduler to the HPC cluster and as such the commands above will have to be executed on one of the login nodes.
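An editorial aside on the `21.03.0-edge` requirement in the new docs: a quick sanity check after loading the module (a sketch, assuming the module places `nextflow` on `PATH`):

```bash
## Confirm the loaded Nextflow is 21.03.0-edge or later (needed for Charliecloud support)
nextflow -version
```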
@@ -35,13 +35,13 @@ This config enables Nextflow to manage the pipeline jobs via the SGE job scheduler
 
 ## Singularity set-up
 
-Load Singularity from the module system and, if you have access to `/exports/igmm/eddie/NextGenResources`, set the Singularity cache directory to the NextGenResources path for the pipeline and version you want to run. If this does not exist, please contact the [IGMM Data Manager](data.manager@igmm.ed.ac.uk) to have it added. You can add these lines to the file `$HOME/.bashrc`, or you can run these commands before you run an nf-core pipeline.
+Load Singularity from the module system and, if you have access to `/exports/igmm/eddie/NextGenResources`, set the Singularity cache directory to the NextGenResources path below. If some containers for your pipeline run are not present, please contact the [IGMM Data Manager](data.manager@igmm.ed.ac.uk) to have them added. You can add these lines to the file `$HOME/.bashrc`, or you can run these commands before you run an nf-core pipeline.
 
 If you do not have access to `/exports/igmm/eddie/NextGenResources`, set the Singularity cache directory to somewhere sensible that is not in your `$HOME` area (which has limited space). It will take time to download all the Singularity containers, but you can use this again.
 
 ```bash
 module load singularity
-export NXF_SINGULARITY_CACHEDIR="/exports/igmm/eddie/NextGenResources/nextflow/singularity/nf-core-rnaseq_v3.0"
+export NXF_SINGULARITY_CACHEDIR="/exports/igmm/eddie/NextGenResources/nextflow/singularity"
 ```
 
 Singularity will create a directory `.singularity` in your `$HOME` directory on eddie. Space on `$HOME` is very limited, so it is a good idea to create a directory somewhere else with more room and link the locations.
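The closing advice about relocating `.singularity` could look like the sketch below; the target path is a placeholder rather than something specified in this commit, and it assumes `~/.singularity` does not already exist:

```bash
## Hypothetical sketch: keep Singularity caches out of the small $HOME quota on eddie
mkdir -p /path/with/more/space/singularity
ln -s /path/with/more/space/singularity ~/.singularity   # assumes ~/.singularity does not already exist
```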
@@ -15,6 +15,7 @@ profiles {
   bi { includeConfig "${params.custom_config_base}/conf/bi.config" }
   bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
   binac { includeConfig "${params.custom_config_base}/conf/binac.config" }
+  biohpc_gen { includeConfig "${params.custom_config_base}/conf/biohpc_gen.config" }
   cbe { includeConfig "${params.custom_config_base}/conf/cbe.config" }
   ccga_dx { includeConfig "${params.custom_config_base}/conf/ccga_dx.config" }
   ccga_med { includeConfig "${params.custom_config_base}/conf/ccga_med.config" }
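Once this include is in the released configs, the resolution can be checked without launching a run; a sketch using Nextflow's `config` command, with a placeholder pipeline name:

```bash
## Hypothetical check: print the configuration an nf-core pipeline would resolve for the new profile
nextflow config nf-core/<pipeline> -profile biohpc_gen
```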