
Merge all

Alexander Peltzer 2019-10-27 13:24:45 +01:00
commit b5b2dd817f
17 changed files with 202 additions and 25 deletions

.travis.yml

@@ -17,8 +17,8 @@ install:
- mkdir -p ${TRAVIS_BUILD_DIR}/tests && cd ${TRAVIS_BUILD_DIR}/tests
env:
- NXF_VER='18.10.1' # Specify a minimum NF version that should be tested and work
- NXF_VER='' # Plus: get the latest NF version and check, that it works
- NXF_VER='18.10.1' SCRATCH='~' # Specify a minimum NF version that should be tested and work. Set SCRATCH for prince.config.
- NXF_VER='' SCRATCH='~' # Plus: get the latest NF version and check that it works. Set SCRATCH for prince.config.
script:
# Run the pipeline with the test profile and test remote config
@@ -26,4 +26,5 @@ script:
grep "{.*includeConfig.*[a-z]*\.config\"" ${TRAVIS_BUILD_DIR}/nfcore_custom.config | \
tr -s ' ' | \
cut -d " " -f 2 | \
xargs -I {} nextflow run ${TRAVIS_BUILD_DIR}/configtest.nf -profile {}
grep -v "czbiohub_aws" | \
xargs -I {} nextflow run ${TRAVIS_BUILD_DIR}/configtest.nf --custom_config_base=${TRAVIS_BUILD_DIR} -profile {}
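For illustration, the shell pipeline above pulls one profile name per line out of `nfcore_custom.config`, drops `czbiohub_aws`, and hands each remaining name to `xargs`, which launches the test workflow once per profile against the local checkout. A hedged sketch of what a single expanded invocation looks like (the profile name is chosen arbitrarily):

```bash
# Equivalent single run for the 'cbe' profile (illustrative only)
nextflow run ${TRAVIS_BUILD_DIR}/configtest.nf \
  --custom_config_base=${TRAVIS_BUILD_DIR} \
  -profile cbe
```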

README.md

@@ -84,12 +84,17 @@ Currently documentation is available for the following clusters:
* [BIGPURPLE](docs/bigpurple.md)
* [BINAC](docs/binac.md)
* [CBE](docs/cbe.md)
* [CCGA](docs/ccga.md)
* [CCGA_DX](/docs/ccga_dx.md)
* [CFC](docs/binac.md)
* [CRICK](docs/crick.md)
* [CZBIOHUB_AWS](docs/czbiohub.md)
* [CZBIOHUB_AWS_HIGHPRIORITY](docs/czbiohub.md)
* [GENOUEST](docs/genouest.md)
* [GIS](docs/gis.md)
* [HEBBE](docs/hebbe.md)
* [KRAKEN](docs/kraken.md)
* [MENDEL](docs/mendel.md)
* [MUNIN](docs/munin.md)
* [PASTEUR](docs/pasteur.md)
@@ -98,7 +103,7 @@ Currently documentation is available for the following clusters:
* [SHH](docs/shh.md)
* [UCT_HEX](docs/uct_hex.md)
* [UPPMAX](docs/uppmax.md)
* [UPPMAX-DEVEL](docs/uppmax-devel.md)
* [UPPMAX_DEVEL](docs/uppmax.md)
* [UZH](docs/uzh.md)
### Uploading to `nf-core/configs`
@@ -111,4 +116,4 @@ We will be notified automatically when you have created your pull request, and p
## Help
If you have any questions or issues please send us a message on [Slack](https://nf-core-invite.herokuapp.com/).
If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack).


@@ -21,3 +21,8 @@ params {
max_cpus = 28
max_time = 48.h
}
weblog{
enabled = true
url = 'https://services.qbic.uni-tuebingen.de/flowstore/workflows'
}
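The added `weblog` scope makes Nextflow POST workflow and task status events to the given HTTP endpoint for every run launched with this profile. For reference, the same behaviour can also be enabled for a single run from the command line; a hedged sketch (the endpoint is the one configured above, the pipeline name is a placeholder):

```bash
# One-off equivalent of the weblog block above (illustrative)
nextflow run <pipeline> -with-weblog 'https://services.qbic.uni-tuebingen.de/flowstore/workflows'
```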

conf/cbe.config (new executable file, +32 lines)

@@ -0,0 +1,32 @@
//Profile config names for nf-core/configs
params {
config_profile_description = 'CLIP BATCH ENVIRONMENT (CBE) cluster profile provided by nf-core/configs'
config_profile_contact = 'Patrick Hüther (@phue)'
config_profile_url = 'http://www.gmi.oeaw.ac.at/'
}
process {
executor = 'slurm'
module = 'singularity/3.2.1'
queue = 'c'
}
singularity.enabled = true
params {
target_qos = 'medium'
params.max_cpus = 36
params.max_memory = 170.GB
igenomesIgnore = true
}
if (params.target_qos == 'short') {
params.max_time = 8.h
process.clusterOptions = '--qos short'
} else if (params.target_qos == 'medium') {
params.max_time = 2.d
process.clusterOptions = '--qos medium'
} else {
params.max_time = 14.d
process.clusterOptions = '--qos long'
}
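The new profile exposes a `target_qos` parameter and maps it to a SLURM QOS plus a matching walltime cap (`short` 8 h, `medium` 2 d, anything else falls through to `long` at 14 d). Assuming command-line parameters are visible while the configuration is resolved, which is the usual Nextflow behaviour, a different QOS can be requested per run; a hedged sketch with a placeholder pipeline name:

```bash
# Request the 8 h 'short' QOS instead of the default 'medium' (illustrative)
nextflow run <pipeline> -profile cbe --target_qos short
```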


@@ -14,6 +14,7 @@ params {
singularity {
enabled = true
runOptions = "-B /ifs -B /scratch -B /work_beegfs"
cacheDir = "/ifs/data/nfs_share/ikmb_repository/singularity_cache/"
}
executor {


@@ -13,6 +13,7 @@ params {
singularity {
enabled = true
runOptions = "-B /mnt"
}
executor {


@@ -7,6 +7,7 @@ params {
singularity {
enabled = true
cacheDir = '/nfsmounts/container'
}
process {
@@ -16,7 +17,7 @@ process {
weblog{
enabled = true
url = 'http://services.qbic.uni-tuebingen.de:8080/workflowservice/workflows'
url = 'https://services.qbic.uni-tuebingen.de/flowstore/workflows'
}
params {

conf/genouest.config (new file, +23 lines)

@@ -0,0 +1,23 @@
//Profile config names for nf-core/configs
params {
config_profile_description = 'The GenOuest cluster profile'
config_profile_contact = 'Anthony Bretaudeau (@abretaud)'
config_profile_url = 'https://www.genouest.org'
}
singularity {
enabled = true
autoMounts = true
runOptions = '-B /scratch:/scratch -B /local:/local -B /db:/db'
}
process {
executor = 'slurm'
}
params {
igenomesIgnore = true
max_memory = 750.GB
max_cpus = 80
max_time = 336.h
}

conf/kraken.config (new file, +24 lines)

@@ -0,0 +1,24 @@
//Profile config names for nf-core/configs
params {
config_profile_name = 'KRAKEN'
config_profile_description = 'Jenkins cluster provided by nf-core/configs.'
config_profile_contact = 'Maxime Garcia or Johannes Alneberg'
config_profile_url = 'kraken.dyn.scilifelab.se'
}
process {
executor = 'local'
}
docker {
enabled = true
mountFlags = 'z'
fixOwnership = true
}
params {
max_memory = 60.GB
max_cpus = 16
max_time = 72.h
igenomes_base = '/share/igenomes/'
}

conf/munin.config

@@ -1,20 +1,21 @@
//Profile config names for nf-core/configs
params {
config_profile_description = 'Big iron cluster profile provided by nf-core/configs.'
config_profile_description = 'MUNIN profile provided by nf-core/configs.'
config_profile_contact = 'Szilveszter Juhos (@szilva)'
config_profile_url = ''
}
process {
executor = 'local'
maxForks = 46
}
// To use singularity, use nextflow run -profile munin,singularity
singularity {
enabled = true
autoMounts = true
}
// To use docker instead of singularity, use nextflow run -profile munin,docker
// To use docker, use nextflow run -profile munin,docker
docker {
enabled = false
mountFlags = 'z'
@@ -22,11 +23,11 @@ docker {
}
params {
saveReference = true
max_memory = 128.GB
max_cpus = 16
// general params
max_memory = 752.GB
max_cpus = 46
max_time = 72.h
// illumina iGenomes reference file paths on UPPMAX
// Local AWS iGenomes reference file paths on munin
igenomes_base = '/data1/references/igenomes/'
}


@@ -14,7 +14,7 @@ singularity {
process {
executor = 'slurm'
queue = 'short'
queue = { task.memory > 756.GB ? 'supercruncher': task.time <= 2.h ? 'short' : task.time <= 48.h ? 'medium': 'long' }
}
executor {
@@ -22,9 +22,9 @@ executor {
}
params {
max_memory = 256.GB
max_memory = 2.TB
max_cpus = 32
max_time = 2.h
max_time = 720.h
//Illumina iGenomes reference file path
igenomes_base = "/projects1/public_data/igenomes/"
}

docs/cbe.md (new file, +18 lines)

@@ -0,0 +1,18 @@
# nf-core/configs: CBE Configuration
All nf-core pipelines have been successfully configured for use on the CLIP BATCH ENVIRONMENT (CBE) cluster at the Vienna BioCenter (VBC).
To use, run the pipeline with `-profile cbe`. This will download and launch the [`cbe.config`](../conf/cbe.config) which has been pre-configured with a setup suitable for the CBE cluster. Using this profile, a docker image containing all of the required software will be downloaded, and converted to a Singularity image before execution of the pipeline.
Before running the pipeline you will need to load Nextflow and Singularity using the environment module system on CBE. You can do this by issuing the commands below:
```bash
## Load Nextflow and Singularity environment modules
module purge
module load nextflow/19.04.0
module load singularity/3.2.1
```
>NB: You will need an account to use the HPC cluster on CBE in order to run the pipeline. If in doubt contact IT.
>NB: Nextflow will need to submit the jobs via the job scheduler to the HPC cluster and as such the commands above will have to be executed on one of the login nodes. If in doubt contact IT.
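A hedged launch sketch combining the module commands above with the profile selection; the pipeline name is a placeholder and not part of this commit:

```bash
## Load the environment modules, then launch with the CBE profile (illustrative)
module purge
module load nextflow/19.04.0
module load singularity/3.2.1
nextflow run nf-core/<pipeline> -profile cbe
```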

docs/genouest.md (new file, +38 lines)

@@ -0,0 +1,38 @@
# nf-core/configs: GenOuest Configuration
All nf-core pipelines have been successfully configured for use on the GenOuest cluster.
To use, run the pipeline with `-profile genouest`. This will download and launch the [`genouest.config`](../conf/genouest.config) which has been pre-configured with a setup suitable for the GenOuest cluster. Using this profile, a docker image containing all of the required software will be downloaded, and converted to a Singularity image before execution of the pipeline.
## Running the workflow on the GenOuest cluster
Nextflow is installed on the GenOuest cluster. Some documentation is available on the [GenOuest website](https://www.genouest.org/howto/#nextflow).
You need to activate it like this:
```bash
source /local/env/envnextflow-19.07.0.sh
```
Nextflow manages each process as a separate job that is submitted to the cluster by using the sbatch command.
Nextflow shouldn't run directly on the submission node but on a compute node. Run nextflow from a compute node:
```bash
# Login to a compute node
srun --pty bash
# Load the dependencies if not done before
source /local/env/envnextflow-19.07.0.sh
# Run a downloaded/git-cloned nextflow workflow from a local copy
nextflow run \
/path/to/nf-core/workflow \
-resume \
-profile genouest \
--email my-email@example.org \
-c my-specific.config
...

# Or run an nf-core pipeline directly by name
nextflow run nf-core/rnaseq ...
```

docs/kraken.md (new file, +10 lines)

@@ -0,0 +1,10 @@
# nf-core/configs: KRAKEN Configuration
This profile can **only** be combined with `jenkins.config`. It is used for
testing pipelines with real data on an **in-house** cluster located at SciLifeLab.
To use, run the pipeline with `-profile kraken`. This will download and launch
the [`kraken.config`](../conf/kraken.config) which has been pre-configured to
test the pipeline using `docker` by default.
Example: `nextflow run -profile kraken,jenkins`
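A slightly fuller, hedged version of the example above with an explicit pipeline placeholder; only the profile pair comes from this commit:

```bash
# Combine the kraken and jenkins profiles for an in-house test run (illustrative)
nextflow run nf-core/<pipeline> -profile kraken,jenkins
```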

docs/munin.md

@@ -1,15 +1,31 @@
# nf-core/configs: MUNIN Configuration
All nf-core pipelines have been successfully configured for use on the MUNIN cluster aka big iron.
All nf-core pipelines have been successfully configured for use on the MUNIN cluster.
To use, run the pipeline with `-profile munin`. This will download and launch the [`munin.config`](../conf/munin.config) which has been pre-configured with a setup suitable for the MUNIN cluster. Using this profile, a docker image containing all of the required software will be downloaded, and converted to a Singularity image before execution of the pipeline.
## Usage
To use, run the pipeline with `-profile munin`.
This will download and launch the [`munin.config`](../conf/munin.config) which has been pre-configured with a setup suitable for the MUNIN cluster.
Example: `nextflow run -profile munin`
## Docker
### Singularity
This is the default behavior of this configuration profile.
Using this profile, if no Singularity image is available, one will be downloaded from Docker Hub and converted to a Singularity image before execution of the pipeline.
It is also possible to specify the singularity profile:
Example: `nextflow run -profile munin,singularity`
### Docker
It is also possible to execute the pipeline using Docker.
Using this profile, if no Docker image is available, one will be downloaded from Docker Hub before execution of the pipeline.
Example: `nextflow run -profile munin,docker`
## Non-mandatory information on iGenomes-specific configuration

nfcore_custom.config

@@ -14,17 +14,17 @@ params.custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/$
profiles {
bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
binac { includeConfig "${params.custom_config_base}/conf/binac.config" }
cbe { includeConfig "${params.custom_config_base}/conf/cbe.config" }
ccga { includeConfig "${params.custom_config_base}/conf/ccga.config" }
ccga_dx { includeConfig "${params.custom_config_base}/conf/ccga_dx.config" }
cfc { includeConfig "${params.custom_config_base}/conf/cfc.config" }
crick { includeConfig "${params.custom_config_base}/conf/crick.config" }
czbiohub_aws { includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config" }
czbiohub_aws_highpriority {
includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config"
includeConfig "${params.custom_config_base}/conf/czbiohub_aws_highpriority.config"
}
czbiohub_aws_highpriority { includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config"; includeConfig "${params.custom_config_base}/conf/czbiohub_aws_highpriority.config" }
genouest { includeConfig "${params.custom_config_base}/conf/genouest.config" }
gis { includeConfig "${params.custom_config_base}/conf/gis.config" }
hebbe { includeConfig "${params.custom_config_base}/conf/hebbe.config" }
kraken { includeConfig "${params.custom_config_base}/conf/kraken.config" }
mendel { includeConfig "${params.custom_config_base}/conf/mendel.config" }
munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
pasteur { includeConfig "${params.custom_config_base}/conf/pasteur.config" }
@@ -33,7 +33,7 @@ profiles {
shh { includeConfig "${params.custom_config_base}/conf/shh.config" }
uct_hex { includeConfig "${params.custom_config_base}/conf/uct_hex.config" }
uppmax { includeConfig "${params.custom_config_base}/conf/uppmax.config" }
uppmax_devel { includeConfig "${params.custom_config_base}/conf/uppmax.config"; includeConfig "${params.custom_config_base}/conf/uppmax-devel.config" }
uppmax_devel { includeConfig "${params.custom_config_base}/conf/uppmax.config"; includeConfig "${params.custom_config_base}/conf/uppmax_devel.config" }
uzh { includeConfig "${params.custom_config_base}/conf/uzh.config" }
}
@@ -44,6 +44,7 @@ params {
// This is a groovy map, not a nextflow parameter set
hostnames = [
crick: ['.thecrick.org'],
genouest: ['.genouest.org'],
uppmax: ['.uppmax.uu.se']
]
}