mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-11 04:23:10 +00:00)

commit 4dfacc1894

9 changed files with 124 additions and 12 deletions

.github/workflows/main.yml (vendored, 2 changes)
```diff
@@ -16,7 +16,7 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'kraken', 'mpcdf', 'munin', 'pasteur', 'phoenix', 'prince', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
+        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'kraken', 'mpcdf', 'munin', 'pasteur', 'phoenix', 'prince', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
```
```diff
@@ -94,6 +94,7 @@ See [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs
 
 Currently documentation is available for the following systems:
 
+* [ABIMS](docs/abims.md)
 * [AWSBATCH](docs/awsbatch.md)
 * [BIGPURPLE](docs/bigpurple.md)
 * [BI](docs/bi.md)
```
conf/abims.config (new file, 24 lines)

```diff
@@ -0,0 +1,24 @@
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'The ABiMS cluster profile'
+  config_profile_contact = 'Gildas Le Corguillé (@lecorguille)'
+  config_profile_url = 'https://abims.sb-roscoff.fr'
+}
+
+singularity {
+  enabled = true
+  autoMounts = false
+  runOptions = '-B /scratch:/scratch -B /scratch2:/scratch2 -B /shared:/shared'
+}
+
+process {
+  executor = 'slurm'
+}
+
+params {
+  igenomes_ignore = true
+  igenomesIgnore = true //deprecated
+  max_memory = 750.GB
+  max_cpus = 200
+  max_time = 24.h
+}
```
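For context, the `runOptions` above pass Singularity's `-B` bind-mount flag; outside Nextflow, the equivalent call looks like this sketch (the image name is hypothetical):

```bash
# What Nextflow effectively adds to each task's container invocation on ABiMS;
# "pipeline.sif" is a placeholder image name.
singularity exec \
    -B /scratch:/scratch -B /scratch2:/scratch2 -B /shared:/shared \
    pipeline.sif bash -c 'ls /shared'
```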
```diff
@@ -4,18 +4,25 @@ params {
   config_profile_contact = 'Maxime Garcia (@MaxUlysse)'
   config_profile_description = 'nf-core/sarek uppmax profile provided by nf-core/configs'
 
-  singleCPUmem = 7000.MB
+  single_cpu_mem = 7000.MB
   // Just useful until iGenomes is updated on UPPMAX
-  igenomeIgnore = true
+  igenomes_ignore = true
   genomes_base = params.genome == 'GRCh37' ? '/sw/data/uppnex/ToolBox/ReferenceAssemblies/hg38make/bundle/2.8/b37' : '/sw/data/uppnex/ToolBox/hg38bundle'
 }
 
 def hostname = "hostname".execute().text.trim()
 
 if (hostname ==~ "r.*") {
-  params.singleCPUmem = 6400.MB
+  params.single_cpu_mem = 6400.MB
+
+  process {
+    withName:BamQC {
+      cpus = {params.max_cpus}
+      memory = {params.max_memory}
+    }
+  }
 }
 
 if (hostname ==~ "i.*") {
-  params.singleCPUmem = 15.GB
+  params.single_cpu_mem = 15.GB
 }
```
```diff
@@ -15,7 +15,6 @@ cleanup = true
 singularity {
   enabled = true
   autoMounts = true
-  runOptions = '-B /run/shm:/run/shm'
   cacheDir = "/projects1/singularity_scratch/cache/"
 }
 
@@ -25,7 +24,7 @@ process {
 }
 
 executor {
-  queueSize = 16
+  queueSize = 8
 }
 
 profiles {
```
```diff
@@ -15,7 +15,7 @@ process {
 }
 
 params {
-  saveReference = true
+  save_reference = true
 
   max_memory = 125.GB
   max_cpus = 16
```
```diff
@@ -12,13 +12,13 @@ singularity {
 }
 
 process {
-  beforeScript = 'module load singularity/2.4.5'
+  beforeScript = 'module load singularity/3.2.1'
   executor = 'slurm'
-  queue = 'genomics'
+  queue = { task.memory >= 32.GB && task.cpus <= 12 ? 'Kim' : task.memory <= 24.GB && task.cpus <= 8 ? 'smallmem' : 'genomics' }
 }
 
 params {
-  max_memory = 32.GB
+  max_memory = 128.GB
   max_cpus = 16
-  max_time = 48.h
+  max_time = 96.h
 }
```
docs/abims.md (new file, 80 lines)

# nf-core/configs: ABiMS Configuration
All nf-core pipelines have been successfully configured for use on the ABiMS cluster.

To use it, run the pipeline with `-profile abims`. This will download and launch the [`abims.config`](../conf/abims.config), which has been pre-configured for the ABiMS cluster. With this profile, a Docker image containing all of the required software is downloaded and converted to a Singularity image before the pipeline is executed.

## Request an account

You will need an account on the ABiMS HPC cluster to run the pipeline. If in doubt, see [http://abims.sb-roscoff.fr/account](http://abims.sb-roscoff.fr/account).
## Running the workflow on the ABiMS cluster

Nextflow is installed on the ABiMS cluster. Load it like this:

```bash
module load nextflow slurm-drmaa graphviz
```
Nextflow manages each process as a separate job submitted to the cluster with the `sbatch` command. Nextflow itself shouldn't run on the submission node; launch it from a compute node:

```bash
# Load the dependencies if not done before
module load nextflow slurm-drmaa graphviz

# Run a downloaded/git-cloned nextflow workflow
srun nextflow run \
    /path/to/nf-core/workflow \
    -profile abims \
    --email my-email@example.org \
    -c my-specific.config
    ...

# Or let Nextflow download the workflow for you
srun nextflow run nf-core/rnaseq -profile abims ...

# To launch in the background
sbatch --wrap "nextflow run nf-core/rnaseq -profile abims ..."
```
Or write an sbatch script:

> nfcore-rnaseq.sh

```bash
#!/bin/bash
#SBATCH -p fast
#SBATCH --mem=4G

module load nextflow slurm-drmaa graphviz
nextflow run nf-core/rnaseq -profile abims ...
```

Launch it on the cluster with sbatch:

```bash
sbatch nfcore-rnaseq.sh
```
### Hello, world

nf-core provides a test profile for each workflow:

```bash
module load nextflow slurm-drmaa graphviz
nextflow run nf-core/rnaseq -profile abims,test
```
## Singularity images mutualized directory

To reduce disk usage, nf-core images can be stored in a mutualized directory: `/shared/software/singularity/images/nf-core/`

The environment variable `NXF_SINGULARITY_CACHEDIR`, set to `/shared/data/cache/nextflow`, points Nextflow at this cache.
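For illustration, a minimal sketch of setting the variable before a run, using the value documented above:

```bash
# Point Nextflow at the shared Singularity cache before launching.
export NXF_SINGULARITY_CACHEDIR=/shared/data/cache/nextflow
nextflow run nf-core/rnaseq -profile abims,test
```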
## Databanks

A local copy of several genomes is available in the `/shared/bank/` directory.
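As a hypothetical sketch only: some pipelines (e.g. nf-core/sarek, whose profile above sets `genomes_base`) accept a `--genomes_base` parameter, so a run against the local banks might look like the following; the exact layout under `/shared/bank/` is site-specific and assumed here.

```bash
# Hypothetical: reuse the local genome banks instead of downloading references.
nextflow run nf-core/sarek -profile abims --genomes_base /shared/bank
```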
```diff
@@ -10,6 +10,7 @@
 
 //Please use a new line per include Config section to allow easier linting/parsing. Thank you.
 profiles {
+  abims { includeConfig "${params.custom_config_base}/conf/abims.config" }
   awsbatch { includeConfig "${params.custom_config_base}/conf/awsbatch.config" }
   bi { includeConfig "${params.custom_config_base}/conf/bi.config" }
   bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
```
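For context, a sketch of how this block is exercised at run time (pipeline name illustrative): passing `-profile abims` makes Nextflow include the matching config from `params.custom_config_base`.

```bash
# Illustrative: resolves the 'abims' profile via the block above and pulls
# ${params.custom_config_base}/conf/abims.config at pipeline startup.
nextflow run nf-core/rnaseq -profile abims
```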