Merge branch 'master' into master

Commit c725c71e4d: 8 changed files with 144 additions and 22 deletions

.github/workflows/main.yml (2 changes)
@@ -16,7 +16,7 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['awsbatch', 'bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga', 'cfc', 'crick', 'denbi_qbic', 'genouest', 'gis', 'hebbe', 'kraken', 'munin', 'pasteur', 'phoenix', 'prince', 'shh_sdag', 'shh_cdag', 'uct_hex', 'uppmax_devel', 'uppmax', 'uzh']
+        profile: ['awsbatch', 'bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga', 'cfc', 'crick', 'denbi_qbic', 'genotoul', 'genouest', 'gis', 'hebbe', 'kraken', 'munin', 'pasteur', 'phoenix', 'prince', 'shh_sdag', 'shh_cdag', 'uct_hex', 'uppmax_devel', 'uppmax', 'uzh']
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
README.md

@@ -1,12 +1,11 @@
-# [![nf-core/configs](docs/images/nfcore-configs_logo.png)](https://github.com/nf-core/configs)
+# [![nf-core/configs](docs/images/nfcore-configs_logo.png "nf-core/configs")](https://github.com/nf-core/configs)
 
-[![Lint Status](https://github.com/nf-core/configs/workflows/nfcore%20configs%20tests/badge.svg)](https://github.com/nf-core/configs/workflows/nfcore%20configs%20tests/badge.svg)
+[![Lint Status](https://github.com/nf-core/configs/workflows/Configs%20tests/badge.svg)](https://github.com/nf-core/configs/workflows/Configs%20tests/badge.svg)
 
 A repository for hosting nextflow config files containing custom parameters required to run nf-core pipelines at different Institutions.
 
 ## Table of contents
 
-* [![nf-core/configs](https://github.com/nf-core/configs)](#nf-coreconfigshttpsgithubcomnf-coreconfigs)
 * [Table of contents](#table-of-contents)
 * [Using an existing config](#using-an-existing-config)
 * [Configuration and parameters](#configuration-and-parameters)
@@ -97,6 +96,7 @@ Currently documentation is available for the following systems:
 * [CZBIOHUB_AWS](docs/czbiohub.md)
 * [CZBIOHUB_AWS_HIGHPRIORITY](docs/czbiohub.md)
 * [DENBI_QBIC](docs/denbi_qbic.md)
+* [GENOTOUL](docs/genotoul.md)
 * [GENOUEST](docs/genouest.md)
 * [GIS](docs/gis.md)
 * [HEBBE](docs/hebbe.md)
@@ -115,7 +115,7 @@ Currently documentation is available for the following systems:
 
 [Fork](https://help.github.com/articles/fork-a-repo/) the `nf-core/configs` repository to your own GitHub account. Within the local clone of your fork add the custom config file to the [`conf/`](https://github.com/nf-core/configs/tree/master/conf) directory, and the documentation file to the [`docs/`](https://github.com/nf-core/configs/tree/master/docs) directory. You will also need to edit and add your custom profile to the [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) file in the top-level directory of the clone.
 
-Afterwards, make sure to edit the `.github/main.yml` file and add your profile name to alphabetically sorted `profile:` scope. This way, it will be tested automatically using GitHub Actions.
+Afterwards, make sure to edit the `.github/main.yml` file and add your profile name to the alphabetically sorted `profile:` scope. This way, it will be tested automatically using GitHub Actions. If you forget to do this, tests will fail and complain about that.
 
 Commit and push these changes to your local clone on GitHub, and then [create a pull request](https://help.github.com/articles/creating-a-pull-request-from-a-fork/) on the `nf-core/configs` GitHub repo with the appropriate information.
 
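To make the contribution steps above concrete, the sketch below shows roughly what registering a new profile looks like. The profile name `myinstitute` and every value in it are placeholders for illustration and are not part of this commit.

```groovy
// nfcore_custom.config: register the new profile (placeholder name)
profiles {
  myinstitute { includeConfig "${params.custom_config_base}/conf/myinstitute.config" }
}

// conf/myinstitute.config: the institutional defaults themselves (example values only)
params {
  config_profile_description = 'Example institute cluster profile'
  config_profile_contact = 'hpc-support@example.org'
  config_profile_url = 'https://example.org/hpc'
  max_memory = 128.GB
  max_cpus = 16
  max_time = 48.h
}

process {
  executor = 'slurm'
}

singularity {
  enabled = true
}
```

The genotoul additions further down in this commit follow exactly this pattern.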
@@ -64,7 +64,7 @@ def check_config(Config, Github):
         sys.exit(0)
     else:
         #Maybe report what is missing here too
-        print("Tests don't seem to test these profiles properly!\n")
+        print("Tests don't seem to test these profiles properly. Please check whether you added the profile to the Github Actions testing YAML.\n")
         print(config_profiles.symmetric_difference(tests))
         sys.exit(1)
 
conf/cbe.config

@@ -12,12 +12,14 @@ process {
   clusterOptions = { task.time <= 8.h ? '--qos short': task.time <= 48.h ? '--qos medium' : '--qos long' }
 }
 
-singularity.enabled = true
+singularity {
+  enabled = true
+  cacheDir = '/scratch-cbe/shared/containers'
+}
 
 params {
   params.max_time = 14.d
   params.max_cpus = 36
   params.max_memory = 4.TB
-  igenomes_ignore = true
-  igenomesIgnore = true //deprecated
+  igenomes_base = '/resources/references/igenomes'
 }
conf/genotoul.config (new file, 27 lines)
@@ -0,0 +1,27 @@
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'The Genotoul cluster profile'
+  config_profile_contact = 'support.bioinfo.genotoul@inra.fr'
+  config_profile_url = 'http://bioinfo.genotoul.fr/'
+}
+
+singularity {
+  // need one image per execution
+  enabled = true
+  runOptions = '-B /bank -B /work2 -B /work -B /save -B /home'
+
+}
+
+process {
+  executor = 'slurm'
+}
+
+params {
+  save_reference = true
+  igenomes_ignore = true
+  igenomesIgnore = true //deprecated
+  // Max resources requested by a normal node on genotoul.
+  max_memory = 120.GB
+  max_cpus = 48
+  max_time = 96.h
+}
docs/cbe.md

@@ -13,5 +13,7 @@ module load nextflow/19.04.0
 module load singularity/3.2.1
 ```
 
+A local copy of the [AWS-iGenomes](https://registry.opendata.aws/aws-igenomes/) resource has been made available on CBE so you should be able to run the pipeline against any reference available in the `igenomes.config` specific to the nf-core pipeline. You can do this by simply using the `--genome <GENOME_ID>` parameter.
+
 >NB: You will need an account to use the HPC cluster on CBE in order to run the pipeline. If in doubt contact IT.
 >NB: Nextflow will need to submit the jobs via the job scheduler to the HPC cluster and as such the commands above will have to be executed on one of the login nodes. If in doubt contact IT.
docs/genotoul.md (new file, 88 lines)
@@ -0,0 +1,88 @@
+# nf-core/configs: Bioinfo Genotoul Configuration
+
+All nf-core pipelines have been successfully configured for use on the Bioinfo Genotoul cluster at INRA Toulouse.
+
+To use, run the pipeline with `-profile genotoul`. This will download and
+launch the [`genotoul.config`](../conf/genotoul.config) which has been
+pre-configured with a setup suitable for the Bioinfo Genotoul cluster.
+
+Using this profile, a Docker image containing all of the required software
+will be downloaded, and converted to a Singularity image before execution
+of the pipeline. Images are stored for all users in the following directory: `/usr/local/bioinfo/src/NextflowWorkflows/singularity-img/`.
+
+## Running the workflow on the Genologin cluster
+
+Before running the pipeline you will need to load Nextflow and
+Singularity using the environment module system on Genotoul. You can do
+this by issuing the commands below.
+
+Once connected to the front-end (login) node:
+
+```bash
+# Login to a compute node
+srun --mem=4G --pty bash
+```
+
+Set up the default Nextflow and Singularity home directories (to be done only once):
+
+```bash
+sh /usr/local/bioinfo/src/NextflowWorkflows/create_nfx_dirs.sh
+```
+
+Load the environment:
+
+```bash
+module purge
+module load bioinfo/nfcore-Nextflow-v19.04.0
+```
+
+Try a test workflow (for example the methylseq workflow):
+
+```bash
+nextflow run nf-core/methylseq -profile genotoul,test
+```
+
+Create a launch script `nfcore-rnaseq.sh`:
+
+```bash
+#!/bin/bash
+#SBATCH -p workq
+#SBATCH -t 1:00:00 # time in hours
+#SBATCH --mem=4G
+#SBATCH --mail-type=BEGIN,END,FAIL
+
+module load bioinfo/nfcore-Nextflow-v19.04.0
+nextflow run nf-core/methylseq -profile genotoul,test
+```
+
+Launch on the cluster with sbatch:
+
+```bash
+sbatch nfcore-rnaseq.sh
+```
+
+## Mounted directories
+
+By default, the available mount points are:
+
+* /bank
+* /home
+* /save
+* /work
+* /work2
+
+To get access to other specific mount points (such as nosave or project),
+you can add a config profile file with the `-profile` option which contains:
+
+```bash
+singularity.runOptions = '-B /directory/to/mount'
+```
+
+## Databanks
+
+A local copy of several genomes is available in the `/bank` directory. See
+our [databank page](http://bioinfo.genotoul.fr/index.php/resources-2/databanks/)
+to search for your favorite genome.
+
+>NB: You will need an account to use the HPC cluster on Genotoul in order
+to run the pipeline. If in doubt see [http://bioinfo.genotoul.fr/](http://bioinfo.genotoul.fr/).
nfcore_custom.config

@@ -22,8 +22,10 @@ profiles {
   cfc { includeConfig "${params.custom_config_base}/conf/cfc.config" }
   crick { includeConfig "${params.custom_config_base}/conf/crick.config" }
   czbiohub_aws { includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config" }
-  czbiohub_aws_highpriority { includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config";
+  czbiohub_aws_highpriority {
+    includeConfig "${params.custom_config_base}/conf/czbiohub_aws.config";
     includeConfig "${params.custom_config_base}/conf/czbiohub_aws_highpriority.config"}
+  genotoul { includeConfig "${params.custom_config_base}/conf/genotoul.config" }
   denbi_qbic { includeConfig "${params.custom_config_base}/conf/denbi_qbic.config" }
   genouest { includeConfig "${params.custom_config_base}/conf/genouest.config" }
   gis { includeConfig "${params.custom_config_base}/conf/gis.config" }

@@ -49,6 +51,7 @@ params {
   // This is a groovy map, not a nextflow parameter set
   hostnames = [
     crick: ['.thecrick.org'],
+    genotoul: ['.genologin1.toulouse.inra.fr', '.genologin2.toulouse.inra.fr'],
     genouest: ['.genouest.org'],
     uppmax: ['.uppmax.uu.se']
   ]
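The `hostnames` map extended above lets pipelines detect which institutional cluster they appear to be running on. Below is a hedged Groovy sketch of how such a hostname-to-profile check could work; it is not the repository's or the pipelines' exact implementation, and the helper name is made up for illustration.

```groovy
import java.net.InetAddress   // imported automatically by Groovy; shown for clarity

// Hypothetical helper: warn if the current hostname suggests a different profile.
def warnOnProfileMismatch(Map hostnames, String chosenProfile) {
    def hostname = InetAddress.localHost.hostName
    hostnames.each { profile, fragments ->
        // Entries such as '.genologin1.toulouse.inra.fr' are treated as hostname fragments
        if (fragments.any { hostname.contains(it) } && profile != chosenProfile) {
            println "WARN: hostname '${hostname}' looks like the '${profile}' cluster, " +
                    "but the pipeline is running with '-profile ${chosenProfile}'"
        }
    }
}

def hostnames = [
    crick   : ['.thecrick.org'],
    genotoul: ['.genologin1.toulouse.inra.fr', '.genologin2.toulouse.inra.fr'],
    genouest: ['.genouest.org'],
    uppmax  : ['.uppmax.uu.se']
]

warnOnProfileMismatch(hostnames, 'genotoul')
```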