Mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-11 04:23:10 +00:00)
Commit b4e8e271be
20 changed files with 152 additions and 56 deletions
.travis.yml (new file, 29 lines)
@@ -0,0 +1,29 @@
sudo: required
language: python
jdk: openjdk8
services:
  - docker
python:
  - '3.6'
cache: pip
matrix:
  fast_finish: true

install:
  # Install Nextflow
  - mkdir /tmp/nextflow && cd /tmp/nextflow
  - wget -qO- get.nextflow.io | bash
  - sudo ln -s /tmp/nextflow/nextflow /usr/local/bin/nextflow
  - mkdir -p ${TRAVIS_BUILD_DIR}/tests && cd ${TRAVIS_BUILD_DIR}/tests

env:
  - NXF_VER='18.10.1' # Specify a minimum NF version that should be tested and work
  - NXF_VER='' # Plus: get the latest NF version and check, that it works

script:
  # Run the pipeline with the test profile and test remote config
  - |
    grep "{.*includeConfig.*[a-z]*\.config\"" ${TRAVIS_BUILD_DIR}/nfcore_custom.config | \
    tr -s ' ' | \
    cut -d " " -f 2 | \
    xargs -I {} nextflow run ${TRAVIS_BUILD_DIR}/configtest.nf -profile {}
README.md (35 changed lines)
@@ -2,6 +2,8 @@
# [nf-core/configs](https://github.com/nf-core/configs)

+[![Build Status](https://travis-ci.org/nf-core/configs.svg?branch=master)](https://travis-ci.org/nf-core/configs)
+
A repository for hosting nextflow config files containing custom parameters required to run nf-core pipelines at different Institutions.

## Table of contents

@@ -28,13 +30,42 @@ You should be able to get a good idea as to how other people are customising the
### Offline usage

-If you want to use an existing config available in `nf-core/configs`, and you're running on a system that has no internet connection, you'll need to download the config file and place it in a location that is visible to the file system on which you are running the pipeline. You can then run the pipeline with the `-c` parameter - see [Testing](#testing) for example.
+If you want to use an existing config available in `nf-core/configs`, and you're running on a system that has no internet connection, you'll need to download the config file and place it in a location that is visible to the file system on which you are running the pipeline. Then run the pipeline with `--custom_config_base`
+or `params.custom_config_base` set to the location of the directory containing the repository files:
+
+```bash
+## Download and unzip the config files
+cd /path/to/my/configs
+wget https://github.com/nf-core/configs/archive/master.zip
+unzip master.zip
+
+## Run the pipeline
+cd /path/to/my/data
+nextflow run /path/to/pipeline/ --custom_config_base /path/to/my/configs/configs-master/
+```
+
+Alternatively, instead of using the configuration profiles from this repository, you can run your
+pipeline by directly calling the single institutional config file that you need with the `-c` parameter.
+
+```bash
+nextflow run /path/to/pipeline/ -c /path/to/my/configs/configs-master/conf/my_config.config
+```
+
+> Note that the nf-core/tools helper package has a `download` command to download all required pipeline
+> files + Singularity containers + institutional configs in one go for you, to make this process easier.
+
## Adding a new config

If you decide to upload your custom config file to `nf-core/configs` then this will ensure that your custom config file will be automatically downloaded, and available at run-time to all nf-core pipelines, and to everyone within your organisation. You will simply have to specify `-profile <config_name>` in the command used to run the pipeline. See [`nf-core/configs`](https://github.com/nf-core/configs/tree/master/conf) for examples.

-Please also make sure to add an extra `params` section with `params.config_profile_name`, `params.config_profile_description`, `params.config_profile_contact` and `params.config_profile_url` set to reasonable values. Users will get information on who wrote the configuration profile then when executing a nf-core pipeline and can report back if there are things missing for example.
+Please also make sure to add an extra `params` section with `params.config_profile_description`, `params.config_profile_contact` and `params.config_profile_url` set to reasonable values. Users will then see who wrote the configuration profile when executing an nf-core pipeline and can report back if, for example, something is missing.
+
+## Checking user hostnames
+
+If your cluster has a set of consistent hostnames, nf-core pipelines can check that users are using your profile.
+Add one or more hostname substrings to `params.hostnames` under a key that matches the profile name.
+If, at the start of a run or when a run fails, the user's hostname contains one of these strings but the matching profile is not being used, a warning message will be printed.
+
### Testing
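The "Adding a new config" section above lists the `params.config_profile_*` values every institutional profile should set. As a minimal sketch mirroring the `conf/*.config` files touched by this commit (the description, contact handle, URL, module name and resource limits below are placeholders, not a real site):

```nextflow
//Profile config names for nf-core/configs
params {
  // Placeholder values: replace with your institution's details
  config_profile_description = 'Example institute cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Jane Doe (@janedoe)'
  config_profile_url = 'https://hpc.example.org/'
}

process {
  // Scheduler and environment setup are site-specific; these values are illustrative only
  beforeScript = 'module load singularity'
  executor = 'slurm'
}

params {
  // Resource caps picked up by nf-core pipelines
  max_memory = 128.GB
  max_cpus = 28
  max_time = 48.h
}
```

The new file would then be registered as a profile in `nfcore_custom.config`, as shown for the existing institutions in the final diff of this commit.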
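The new "Checking user hostnames" section relies on the `params.hostnames` map that this commit adds to `nfcore_custom.config` (see the last diff below). A small sketch of how an additional institution could hook into that check; the `myinst` key and `.hpc.example.org` suffix are hypothetical:

```nextflow
params {
  // Groovy map keyed by profile name; values are hostname substrings to match.
  // 'myinst' and '.hpc.example.org' are illustrative placeholders.
  hostnames = [
    crick: ['.thecrick.org'],
    uppmax: ['.uppmax.uu.se'],
    myinst: ['.hpc.example.org']
  ]
}
```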
conf/binac.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'BINAC'
  config_profile_description = 'BINAC cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Alexander Peltzer (@apeltzer)'
  config_profile_url = 'https://www.bwhpc-c5.de/wiki/index.php/Category:BwForCluster_BinAC'

@@ -17,7 +16,7 @@ process {
}

params {
-  igenomesIgnore = true
+  igenomes_base = '/nfsmounts/igenomes'
  max_memory = 128.GB
  max_cpus = 28
  max_time = 48.h
conf/ccga.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'CCGA'
  config_profile_description = 'CCGA cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Marc Hoeppner (@marchoeppner)'
  config_profile_url = 'https://www.ikmb.uni-kiel.de/'
conf/cfc.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'CFC'
  config_profile_description = 'QBiC Core Facility cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Alexander Peltzer (@apeltzer)'
  config_profile_url = 'http://qbic.uni-tuebingen.de/'

@@ -11,13 +10,13 @@ singularity {
}

process {
-  beforeScript = 'module load qbic/singularity_slurm/3.0.1'
+  beforeScript = 'module load qbic/singularity_slurm/3.0.3'
  executor = 'slurm'
}

params {
-  igenomesIgnore = true
+  igenomes_base = '/nfsmounts/igenomes'
  max_memory = 60.GB
-  max_cpus = 24
+  max_cpus = 20
  max_time = 140.h
}
conf/crick.config
@@ -1,8 +1,7 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'CRICK'
  config_profile_description = 'The Francis Crick Institute CAMP HPC cluster profile provided by nf-core/configs.'
-  config_profile_contact = 'Harshil Patel (@drpatelh )'
+  config_profile_contact = 'Harshil Patel (@drpatelh)'
  config_profile_url = 'https://www.crick.ac.uk/research/platforms-and-facilities/scientific-computing/technologies'
}
conf/gis.config
@@ -1,11 +1,9 @@
-/*
- * -------------------------------------------------
- * Nextflow config file for GIS (Aquila)
- * -------------------------------------------------
- * Defines reference genomes, using iGenome paths
- * Imported under the default 'standard' Nextflow
- * profile in nextflow.config
- */
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'Genome Institute of Singapore (Aquila) cluster profile provided by nf-core/configs.'
+  config_profile_contact = 'Andreas Wilm (@andreas-wilm)'
+  config_profile_url = 'https://www.a-star.edu.sg/gis/'
+}

process {
  executor = 'sge'

@@ -20,4 +18,3 @@ params {
  // illumina iGenomes reference file paths on GIS Aquila
  igenomes_base = '/mnt/projects/rpd/genomes.testing/S3_igenomes/'
}
-
conf/hebbe.config
@@ -1,8 +1,7 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'HEBBE'
  config_profile_description = 'Gothenburg Hebbe cluster profile provided by nf-core/configs.'
-  config_profile_contact = 'Phil Ewels (@ewels )'
+  config_profile_contact = 'Phil Ewels (@ewels)'
  config_profile_url = 'http://www.c3se.chalmers.se/index.php/Hebbe'
}
conf/mendel.config
@@ -1,22 +1,23 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'MENDEL'
  config_profile_description = 'GMI MENDEL cluster profile provided by nf-core/configs'
-  config_profile_contact = 'Philipp H (@phue)'
+  config_profile_contact = 'Patrick Hüther (@phue)'
  config_profile_url = 'http://www.gmi.oeaw.ac.at/'
}

-singularity {
-  enabled = true
+manifest {
+  nextflowVersion = '>=19.01.0'
}

process {
-  beforeScript = 'module load Singularity'
-  executor = 'pbs'
+  beforeScript = {'module load Singularity; module load Miniconda3'}
+  executor = 'pbspro'
  clusterOptions = { "-P $params.project" }
}

params {
+  max_cpus = 32
+  max_memory = 128.GB
  max_time = 192.h
  igenomesIgnore = true
}
conf/munin.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'MUNIN'
  config_profile_description = 'Big iron cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Szilveszter Juhos (@szilva)'
  config_profile_url = ''
conf/phoenix.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'PHOENIX'
  config_profile_description = 'University of Adelaide Phoenix HPC cluster profile provided by nf-core/configs'
  config_profile_contact = 'Yassine Souilmi / Alexander Peltzer (@yassineS, @apeltzer)'
  config_profile_url = 'https://www.adelaide.edu.au/phoenix/'
conf/shh.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'SHH'
  config_profile_description = 'MPI SHH cluster profile provided by nf-core/configs.'
  config_profile_contact = 'James Fellows Yates (@jfy133)'
  config_profile_url = 'https://shh.mpg.de'
conf/uct_hex.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'uct_hex'
  config_profile_description = 'University of Cape Town HEX cluster config file provided by nf-core/configs.'
  config_profile_contact = 'Katie Lennard (@kviljoen)'
  config_profile_url = 'http://hpc.uct.ac.za/index.php/hex-3/'

@@ -22,4 +21,3 @@ executor{
  executor = 'pbs'
  jobName = { "$task.tag" }
}
-
conf/uppmax-devel.config
@@ -1,6 +1,5 @@
// Profile config names for nf-core/configs
params {
-  config_profile_name = 'UPPMAX-devel'
  config_profile_description = 'Testing & development profile for UPPMAX, provided by nf-core/configs.'
  config_profile_contact = 'Phil Ewels (@ewels)'
  config_profile_url = 'https://www.uppmax.uu.se/'
conf/uppmax.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params {
-  config_profile_name = 'UPPMAX'
  config_profile_description = 'Swedish UPPMAX cluster profile provided by nf-core/configs.'
  config_profile_contact = 'Phil Ewels (@ewels)'
  config_profile_url = 'https://www.uppmax.uu.se/'
conf/uzh.config
@@ -1,6 +1,5 @@
//Profile config names for nf-core/configs
params{
-  config_profile_name = 'UZH'
  config_profile_description = 'UZH science cloud profile provided by nf-core/configs'
  config_profile_contact = 'Judith Neukamm/Alexander Peltzer (@JudithNeukamm, @apeltzer)'
  config_profile_url = 'https://www.id.uzh.ch/en/scienceit/infrastructure/sciencecloud.html'
configtest.nf (new file, 10 lines)

@@ -0,0 +1,10 @@
#!/usr/bin/env nextflow

def separator = "-"*40
print("$separator\n")
print("Parameter scope for config \'${workflow.profile}\'\n")
print("$separator\n")
params.each {
    assert it
    print("\t$it\n")
}
docs/mendel.md (new file, 30 lines)

@@ -0,0 +1,30 @@
# nf-core/configs: MENDEL Configuration

All nf-core pipelines have been successfully configured for use on the MENDEL cluster at the Gregor Mendel Institute (GMI).

To use, run the pipeline with `-profile conda,mendel`. This will download and launch the [`mendel.config`](../conf/mendel.config) which has been pre-configured with a setup suitable for the MENDEL cluster. A Conda environment will be created automatically and software dependencies will be downloaded from [bioconda](https://bioconda.github.io/).

Theoretically, using `-profile singularity,mendel` would download a Docker image containing all of the required software and convert it to a Singularity image before execution of the pipeline. However, there is a regression in the Singularity deployment on MENDEL which renders containers downloaded from public repositories unusable, because they lack the /lustre mountpoint.

If you want to run the pipeline containerized anyway, you will have to build the image yourself (on a machine where you have root access) using the provided `Singularity` file in the pipeline repository:

```bash
cd /path/to/pipeline-repository
echo 'mkdir /lustre' >> Singularity
singularity build nf-core-methylseq-custom.simg Singularity
```

After you have copied the container image to the cluster filesystem, make sure to pass the path to the image to the pipeline with `-with-singularity /path/to/nf-core-methylseq-custom.simg`.

Before running the pipeline you will need to load Nextflow and Conda using the environment module system on MENDEL. You can do this by issuing the commands below:

```bash
## Load Nextflow and Conda environment modules
module purge
module load Nextflow
module load Miniconda3 # not needed if using Singularity
```

> NB: You will need an account to use the HPC cluster in order to run the pipeline. If in doubt, contact the HPC team.

> NB: Nextflow will need to submit the jobs via the job scheduler to the HPC cluster and as such the commands above will have to be executed on one of the login nodes. If in doubt, contact the HPC team.
nextflow.config (new file, 1 line)

@@ -0,0 +1 @@
includeConfig("nfcore_custom.config")
nfcore_custom.config
@@ -8,23 +8,33 @@
 * name here.
 */

-config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}/conf"
+params.custom_config_version = 'master'
+params.custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"

profiles {
-  binac { includeConfig "${config_base}/binac.config" }
-  ccga { includeConfig "${config_base}/ccga.config" }
-  cfc { includeConfig "${config_base}/cfc.config" }
-  crick { includeConfig "${config_base}/crick.config" }
-  gis { includeConfig "${config_base}/gis.config" }
-  hebbe { includeConfig "${config_base}/hebbe.config" }
-  mendel { includeConfig "${config_base}/mendel.config" }
-  munin { includeConfig "${config_base}/munin.config" }
-  phoenix { includeConfig "${config_base}/pheonix.config" }
-  shh { includeConfig "${config_base}/shh.config" }
-  uct_hex { includeConfig "${config_base}/uct_hex.config" }
-  uppmax_devel { includeConfig "${config_base}/uppmax.config"
-    includeConfig "${config_base}/uppmax-devel.config"
-  }
-  uppmax { includeConfig "${config_base}/uppmax.config" }
-  uzh { includeConfig "${config_base}/uzh.config" }
+  binac { includeConfig "${params.custom_config_base}/conf/binac.config" }
+  ccga { includeConfig "${params.custom_config_base}/conf/ccga.config" }
+  cfc { includeConfig "${params.custom_config_base}/conf/cfc.config" }
+  crick { includeConfig "${params.custom_config_base}/conf/crick.config" }
+  gis { includeConfig "${params.custom_config_base}/conf/gis.config" }
+  hebbe { includeConfig "${params.custom_config_base}/conf/hebbe.config" }
+  mendel { includeConfig "${params.custom_config_base}/conf/mendel.config" }
+  munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
+  phoenix { includeConfig "${params.custom_config_base}/conf/phoenix.config" }
+  shh { includeConfig "${params.custom_config_base}/conf/shh.config" }
+  uct_hex { includeConfig "${params.custom_config_base}/conf/uct_hex.config" }
+  uppmax_devel { includeConfig "${params.custom_config_base}/conf/uppmax.config"; includeConfig "${params.custom_config_base}/conf/uppmax-devel.config" }
+  uppmax { includeConfig "${params.custom_config_base}/conf/uppmax.config" }
+  uzh { includeConfig "${params.custom_config_base}/conf/uzh.config" }
+}
+
+// If user hostnames contain one of these substring and they are
+// not running the associated profile, it will trigger a warning message
+// Should be defined here for all profiles (not within profile config)
+params {
+  // This is a groovy map, not a nextflow parameter set
+  hostnames = [
+    crick: ['.thecrick.org'],
+    uppmax: ['.uppmax.uu.se']
+  ]
}
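For reference, wiring a further institution into the `profiles` block above would take one more line of the same shape; the `myinst` profile name and config file below are hypothetical:

```nextflow
profiles {
  // ...existing entries as in the diff above...
  myinst { includeConfig "${params.custom_config_base}/conf/myinst.config" }
}
```

Users would then select it with `-profile myinst`, optionally combined with other profiles (e.g. `-profile singularity,myinst`), as docs/mendel.md shows for MENDEL.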