
Merge branch 'master' into biohpc_charliecloud_patch

Patrick Hüther 2022-09-18 12:52:33 +02:00 committed by GitHub
commit 85aa6adeed
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
22 changed files with 658 additions and 4 deletions


@@ -62,6 +62,7 @@ jobs:
         - "google"
         - "hasta"
         - "hebbe"
+        - "hki"
         - "icr_davros"
         - "ifb_core"
         - "imperial"
@@ -83,6 +84,7 @@ jobs:
         - "sage"
         - "sahmri"
         - "sanger"
+        - "sbc_sharc"
         - "seg_globe"
         - "uct_hpc"
         - "unibe_ibu"

README.md

@@ -118,6 +118,7 @@ Currently documentation is available for the following systems:
 - [GOOGLE](docs/google.md)
 - [HASTA](docs/hasta.md)
 - [HEBBE](docs/hebbe.md)
+- [HKI](docs/hki.md)
 - [ICR_DAVROS](docs/icr_davros.md)
 - [IMPERIAL](docs/imperial.md)
 - [JAX](docs/jax.md)
@@ -137,6 +138,7 @@ Currently documentation is available for the following systems:
 - [ROSALIND](docs/rosalind.md)
 - [SAGE BIONETWORKS](docs/sage.md)
 - [SANGER](docs/sanger.md)
+- [SBC_SHARC](docs/sbc_sharc.md)
 - [SEG_GLOBE](docs/seg_globe.md)
 - [UCT_HPC](docs/uct_hpc.md)
 - [UNIBE_IBU](docs/unibe_ibu.md)
@@ -197,6 +199,10 @@ Currently documentation is available for the following pipelines within specific profiles:
 - ampliseq
   - [BINAC](docs/pipeline/ampliseq/binac.md)
   - [UPPMAX](docs/pipeline/ampliseq/uppmax.md)
+- atacseq
+  - [SBC_SHARC](docs/pipeline/atacseq/sbc_sharc.md)
+- chipseq
+  - [SBC_SHARC](docs/pipeline/chipseq/sbc_sharc.md)
 - eager
   - [EVA](docs/pipeline/eager/eva.md)
 - mag
@@ -204,11 +210,14 @@ Currently documentation is available for the following pipelines within specific profiles:
 - rnafusion
   - [HASTA](docs/pipeline/rnafusion/hasta.md)
   - [MUNIN](docs/pipeline/rnafusion/munin.md)
+- rnaseq
+  - [SBC_SHARC](docs/pipeline/rnaseq/sbc_sharc.md)
 - rnavar
   - [MUNIN](docs/pipeline/rnavar/munin.md)
 - sarek
   - [Cancer Research UK Manchester Institute](docs/pipeline/sarek/crukmi.md)
   - [MUNIN](docs/pipeline/sarek/munin.md)
+  - [SBC_SHARC](docs/pipeline/sarek/sbc_sharc.md)
   - [UPPMAX](docs/pipeline/sarek/uppmax.md)
 - taxprofiler
   - [EVA](docs/pipeline/taxprofiler/eva.md)


@@ -12,7 +12,7 @@ singularity {
 process {
     executor = 'slurm'
-    queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
+    queue = 'qbic'
     scratch = 'true'
 }


@@ -11,7 +11,7 @@ singularity {
 process {
     executor = 'slurm'
-    queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
+    queue = 'qbic'
     scratch = 'true'
 }
@@ -25,4 +25,4 @@ params {
     max_memory = 1999.GB
     max_cpus = 128
     max_time = 140.h
-}
+}

conf/hki.config (new file)

@@ -0,0 +1,104 @@
params {
    config_profile_description = 'HKI clusters profile provided by nf-core/configs.'
    config_profile_contact = 'James Fellows Yates (@jfy133)'
    config_profile_url = 'https://leibniz-hki.de'
}

profiles {
    apate {
        params {
            config_profile_description = 'apate HKI cluster profile provided by nf-core/configs'
            config_profile_contact = 'James Fellows Yates (@jfy133)'
            config_profile_url = 'https://leibniz-hki.de'
            max_memory = 128.GB
            max_cpus = 32
            max_time = 1440.h
        }
        process {
            executor = 'local'
            maxRetries = 2
        }
        executor {
            queueSize = 8
        }
        singularity {
            enabled = true
            autoMounts = true
            cacheDir = '/Net/Groups/ccdata/apps/singularity'
        }
        conda {
            cacheDir = '/Net/Groups/ccdata/apps/conda_envs'
        }
        cleanup = true
    }

    aither {
        params {
            config_profile_description = 'aither HKI cluster profile provided by nf-core/configs'
            config_profile_contact = 'James Fellows Yates (@jfy133)'
            config_profile_url = 'https://leibniz-hki.de'
            max_memory = 128.GB
            max_cpus = 32
            max_time = 1440.h
        }
        process {
            executor = 'local'
            maxRetries = 2
        }
        executor {
            queueSize = 8
        }
        singularity {
            enabled = true
            autoMounts = true
            cacheDir = '/Net/Groups/ccdata/apps/singularity'
        }
        conda {
            cacheDir = '/Net/Groups/ccdata/apps/conda_envs'
        }
        cleanup = true
    }

    arges {
        params {
            config_profile_description = 'arges HKI cluster profile provided by nf-core/configs'
            config_profile_contact = 'James Fellows Yates (@jfy133)'
            config_profile_url = 'https://leibniz-hki.de'
            max_memory = 64.GB
            max_cpus = 12
            max_time = 1440.h
        }
        process {
            executor = 'local'
            maxRetries = 2
        }
        executor {
            queueSize = 8
        }
        singularity {
            enabled = true
            autoMounts = true
            cacheDir = '/Net/Groups/ccdata/apps/singularity'
        }
        conda {
            cacheDir = '/Net/Groups/ccdata/apps/conda_envs'
        }
        cleanup = true
    }

    debug {
        cleanup = false
    }
}

conf/pipeline/atacseq/sbc_sharc.config (new file)

@@ -0,0 +1,74 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/atacseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

// process-specific resource requirements - reduced specification from those in atacseq/conf/base.config
process {
    withLabel:process_low {
        cpus = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time = { check_max( 4.h * task.attempt, 'time' ) }
    }
    withLabel:process_medium {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time = { check_max( 6.h * task.attempt, 'time' ) }
    }
    withLabel:process_high {
        cpus = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time = { check_max( 8.h * task.attempt, 'time' ) }
    }
    withLabel:process_long {
        time = { check_max( 12.h * task.attempt, 'time' ) }
    }
}

// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}

conf/pipeline/chipseq/sbc_sharc.config (new file)

@@ -0,0 +1,74 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/chipseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

// process-specific resource requirements - reduced specification from those in chipseq/conf/base.config
process {
    withLabel:process_low {
        cpus = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time = { check_max( 4.h * task.attempt, 'time' ) }
    }
    withLabel:process_medium {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time = { check_max( 6.h * task.attempt, 'time' ) }
    }
    withLabel:process_high {
        cpus = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time = { check_max( 8.h * task.attempt, 'time' ) }
    }
    withLabel:process_long {
        time = { check_max( 12.h * task.attempt, 'time' ) }
    }
}

// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}

conf/pipeline/rnaseq/sbc_sharc.config (new file)

@@ -0,0 +1,79 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/rnaseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

// process-specific resource requirements - reduced specification from those in rnaseq/conf/base.config
process {
    withLabel:process_low {
        cpus = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time = { check_max( 4.h * task.attempt, 'time' ) }
    }
    withLabel:process_medium {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time = { check_max( 6.h * task.attempt, 'time' ) }
    }
    withLabel:process_high {
        cpus = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time = { check_max( 8.h * task.attempt, 'time' ) }
    }
    withLabel:process_long {
        time = { check_max( 12.h * task.attempt, 'time' ) }
    }
    withLabel:process_high_memory {
        memory = { check_max( 60.GB * task.attempt, 'memory' ) }
    }
}

// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}

conf/pipeline/sarek/sbc_sharc.config (new file)

@@ -0,0 +1,114 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/sarek
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

// process-specific resource requirements - reduced specification from those in sarek/conf/base.config
process {
    // process labels
    withLabel:process_low {
        cpus = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time = { check_max( 4.h * task.attempt, 'time' ) }
    }
    withLabel:process_medium {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time = { check_max( 6.h * task.attempt, 'time' ) }
    }
    withLabel:process_high {
        cpus = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time = { check_max( 8.h * task.attempt, 'time' ) }
    }
    withLabel:process_long {
        time = { check_max( 12.h * task.attempt, 'time' ) }
    }
    withLabel:process_high_memory {
        memory = { check_max( 60.GB * task.attempt, 'memory' ) }
    }

    // process name
    withName:'BWAMEM1_MEM|BWAMEM2_MEM' {
        cpus = { check_max( 12 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time = { check_max( 8.h * task.attempt, 'time' ) }
    }
    withName:'FASTP' {
        cpus = { check_max( 12 * task.attempt, 'cpus' ) }
    }
    withName:'FASTQC|FASTP|MOSDEPTH|SAMTOOLS_CONVERT' {
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
    }
    withName:'GATK4_APPLYBQSR|GATK4_APPLYBQSR_SPARK|GATK4_BASERECALIBRATOR|SAMTOOLS_STATS' {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
    }
    withName:'GATK4_APPLYBQSR|GATK4_APPLYBQSR_SPARK|GATK4_BASERECALIBRATOR|GATK4_GATHERBQSRREPORTS' {
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
    }
    withName:'GATK4_MARKDUPLICATES' {
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
    }
    withName:'FREEBAYES|SAMTOOLS_STATS|SAMTOOLS_INDEX|UNZIP' {
        cpus = { check_max( 1 * task.attempt, 'cpus' ) }
    }
}

// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}


@@ -30,11 +30,14 @@ aws {
     client {
         uploadChunkSize = 209715200
     }
+    batch {
+        maxParallelTransfers = 1
+    }
 }
 executor {
     name = 'awsbatch'
     // Ensure unlimited queue size on AWS Batch
-    queueSize = 100000
+    queueSize = 500
     // Slow down the rate at which AWS Batch jobs accumulate in
     // the queue (an attempt to prevent orphaned EBS volumes)
     submitRateLimit = '5 / 1 sec'

conf/sbc_sharc.config (new file)

@@ -0,0 +1,57 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Base Institutional Configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

params {
    // nf-core specific parameters displayed in header summary of each run
    config_profile_description = 'Sheffield Bioinformatics Core - ShARC'
    config_profile_contact = 'Lewis Quayle (l.quayle@sheffield.ac.uk)'
    config_profile_url = 'https://docs.hpc.shef.ac.uk/en/latest/sharc/index.html'

    // hpc resource limits
    max_cpus = 16
    max_memory = 64.GB
    max_time = 96.h
}

// container engine
singularity {
    enabled = true
    autoMounts = true
}

// hpc configuration specific to ShARC
process {
    // scheduler
    executor = 'sge'
    penv = 'smp'
    queue = { task.time <= 6.h ? 'shortint.q' : 'all.q' }
    clusterOptions = { "-l rmem=${task.memory.toGiga()}G" }

    // error and retry handling
    errorStrategy = { task.exitStatus in [143,137,104,134,139,140] ? 'retry' : 'finish' }
    maxRetries = 2
}

docs/hki.md (new file)

@@ -0,0 +1,24 @@
# nf-core/configs: HKI Configuration

All nf-core pipelines have been successfully configured for use on the clusters at the [Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Institute (HKI)](https://www.leibniz-hki.de/en).

To use, run the pipeline with `-profile hki,<cluster>`. This will download and launch [`hki.config`](../conf/hki.config), which contains a specific profile for each cluster. The number of jobs that can run in parallel is currently limited to 8.
The currently available profiles are:

- apate (uses Singularity; cleanup set to true by default)
- arges (uses Singularity; cleanup set to true by default)
- aither (uses Singularity; cleanup set to true by default)
- debug (sets cleanup to false for debugging purposes; use e.g. `-profile hki,<cluster>,debug`)
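For example, to launch a run on the apate cluster (the pipeline name below is a placeholder):

```shell
nextflow run nf-core/<pipeline> -profile hki,apate
```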
Note that Nextflow is not necessarily installed by default on the HKI HPC cluster(s), so you will need to install it into a directory to which you have write access. Follow the installation instructions in the Nextflow documentation:

- Install Nextflow: [here](https://www.nextflow.io/docs/latest/getstarted.html#)
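A minimal sketch of that install, assuming `~/bin` as the writable target directory (adjust to a location you can write to):

```shell
# download the Nextflow launcher into a writable directory
mkdir -p ~/bin && cd ~/bin
curl -s https://get.nextflow.io | bash

# make the launcher available on your PATH and verify it runs
export PATH="$HOME/bin:$PATH"
nextflow -version
```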
All of the intermediate files required to run the pipeline will be stored in the `work/` directory. It is recommended to delete this directory after the pipeline has finished successfully, because it can get quite large and all of the main output files will be saved in the `results/` directory anyway.

> NB: You will need an account to use the HKI HPC clusters in order to run the pipeline. If in doubt, contact the ICT Service Desk.

> NB: Nextflow will need to submit the jobs via SLURM to the HKI HPC clusters, and as such the commands above will have to be executed on the login node. If in doubt, contact ICT.

docs/pipeline/atacseq/sbc_sharc.md (new file)

@@ -0,0 +1,11 @@
# nf-core/configs: ATAC-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/atacseq](https://nf-co.re/atacseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the atacseq-specific [`sbc_sharc.config`](../../../conf/pipeline/atacseq/sbc_sharc.config), which has been pre-configured for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html). The institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/atacseq -profile sbc_sharc`
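A fuller invocation might look like the following sketch (the input and genome values are illustrative; see the pipeline documentation for the required parameters):

```shell
nextflow run nf-core/atacseq \
    -profile sbc_sharc \
    --input samplesheet.csv \
    --genome GRCh38 \
    --outdir ./results
```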

docs/pipeline/chipseq/sbc_sharc.md (new file)

@@ -0,0 +1,11 @@
# nf-core/configs: ChIP-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/chipseq](https://nf-co.re/chipseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the chipseq-specific [`sbc_sharc.config`](../../../conf/pipeline/chipseq/sbc_sharc.config), which has been pre-configured for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html). The institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/chipseq -profile sbc_sharc`
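A fuller invocation might look like the following sketch (the input and genome values are illustrative; see the pipeline documentation for the required parameters):

```shell
nextflow run nf-core/chipseq \
    -profile sbc_sharc \
    --input samplesheet.csv \
    --genome GRCh38 \
    --outdir ./results
```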

docs/pipeline/rnaseq/sbc_sharc.md (new file)

@@ -0,0 +1,11 @@
# nf-core/configs: RNA-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/rnaseq](https://nf-co.re/rnaseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the rnaseq-specific [`sbc_sharc.config`](../../../conf/pipeline/rnaseq/sbc_sharc.config), which has been pre-configured for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html). The institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/rnaseq -profile sbc_sharc`
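A fuller invocation might look like the following sketch (the input and genome values are illustrative; see the pipeline documentation for the required parameters):

```shell
nextflow run nf-core/rnaseq \
    -profile sbc_sharc \
    --input samplesheet.csv \
    --genome GRCh38 \
    --outdir ./results
```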

docs/pipeline/sarek/sbc_sharc.md (new file)

@@ -0,0 +1,11 @@
# nf-core/configs: Sarek Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/sarek](https://nf-co.re/sarek) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the sarek-specific [`sbc_sharc.config`](../../../conf/pipeline/sarek/sbc_sharc.config), which has been pre-configured for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html). The institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/sarek -profile sbc_sharc`
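A fuller invocation might look like the following sketch (the input and tool values are illustrative; see the pipeline documentation for the required parameters):

```shell
nextflow run nf-core/sarek \
    -profile sbc_sharc \
    --input samplesheet.csv \
    --tools haplotypecaller \
    --outdir ./results
```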

docs/sbc_sharc.md (new file)

@@ -0,0 +1,40 @@
# nf-core/configs: Sheffield Bioinformatics Core Facility ShARC Configuration

## Using the SBC_ShARC Institutional Configuration Profile

To use [`sbc_sharc.config`](../conf/sbc_sharc.config), run Nextflow with an nf-core pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch [`sbc_sharc.config`](../conf/sbc_sharc.config), which has been pre-configured with a setup suitable for the ShARC cluster, and will automatically load the appropriate pipeline-specific configuration file.

The following nf-core pipelines have been successfully configured for use on the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html):
- [nf-co.re/atacseq](https://nf-co.re/atacseq)
- [nf-co.re/chipseq](https://nf-co.re/chipseq)
- [nf-co.re/rnaseq](https://nf-co.re/rnaseq)
- [nf-co.re/sarek](https://nf-co.re/sarek)
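For example, a run might be launched as follows (the pipeline and parameter values are illustrative):

```shell
nextflow run nf-core/rnaseq \
    -profile sbc_sharc \
    --input samplesheet.csv \
    --outdir ./results
```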
When using [`sbc_sharc.config`](../conf/sbc_sharc.config) with the pipelines listed above, the appropriate configuration file from the list below will be loaded automatically:

- [atacseq sbc_sharc.config](../conf/pipeline/atacseq/sbc_sharc.config)
- [chipseq sbc_sharc.config](../conf/pipeline/chipseq/sbc_sharc.config)
- [rnaseq sbc_sharc.config](../conf/pipeline/rnaseq/sbc_sharc.config)
- [sarek sbc_sharc.config](../conf/pipeline/sarek/sbc_sharc.config)

The [`sbc_sharc.config`](../conf/sbc_sharc.config) configuration file might work with other nf-core pipelines as it stands, but we cannot guarantee they will run without issue. We will continue to create, test and optimise configurations for new pipelines.

## A Note on Singularity Containers

The [`sbc_sharc.config`](../conf/sbc_sharc.config) configuration file supports running nf-core pipelines with Singularity containers; Singularity images will be downloaded automatically before execution of the pipeline.

When you run Nextflow for the first time, Singularity will create a hidden directory `.singularity` in your `$HOME` directory `/home/$USER`, which has very limited (10 GB) space available. It is therefore a good idea to create a directory somewhere else (e.g., `/data/$USER`) with more room and link the locations. To do this, run the following series of commands:
```shell
# change directory to $HOME
cd $HOME
# make the directory that will be linked to
mkdir -p /data/$USER/.singularity
# link the new directory with the existing one
ln -s /data/$USER/.singularity .singularity
```

nfcore_custom.config

@@ -44,6 +44,7 @@ profiles {
     google { includeConfig "${params.custom_config_base}/conf/google.config" }
     hasta { includeConfig "${params.custom_config_base}/conf/hasta.config" }
     hebbe { includeConfig "${params.custom_config_base}/conf/hebbe.config" }
+    hki { includeConfig "${params.custom_config_base}/conf/hki.config" }
     icr_davros { includeConfig "${params.custom_config_base}/conf/icr_davros.config" }
     ifb_core { includeConfig "${params.custom_config_base}/conf/ifb_core.config" }
     imperial { includeConfig "${params.custom_config_base}/conf/imperial.config" }
@@ -65,6 +66,7 @@ profiles {
     sage { includeConfig "${params.custom_config_base}/conf/sage.config" }
     sahmri { includeConfig "${params.custom_config_base}/conf/sahmri.config" }
     sanger { includeConfig "${params.custom_config_base}/conf/sanger.config" }
+    sbc_sharc { includeConfig "${params.custom_config_base}/conf/sbc_sharc.config" }
     seg_globe { includeConfig "${params.custom_config_base}/conf/seg_globe.config" }
     uct_hpc { includeConfig "${params.custom_config_base}/conf/uct_hpc.config" }
     unibe_ibu { includeConfig "${params.custom_config_base}/conf/unibe_ibu.config" }

pipeline/atacseq.config (new file)

@@ -0,0 +1,13 @@
/*
 * -------------------------------------------------
 *  nfcore/atacseq custom profile Nextflow config file
 * -------------------------------------------------
 * Config options for custom environments.
 * Cluster-specific config options should be saved
 * in the conf/pipeline/atacseq folder and imported
 * under a profile name here.
 */

profiles {
    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/atacseq/sbc_sharc.config" }
}

pipeline/chipseq.config (new file)

@@ -0,0 +1,13 @@
/*
 * -------------------------------------------------
 *  nfcore/chipseq custom profile Nextflow config file
 * -------------------------------------------------
 * Config options for custom environments.
 * Cluster-specific config options should be saved
 * in the conf/pipeline/chipseq folder and imported
 * under a profile name here.
 */

profiles {
    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/chipseq/sbc_sharc.config" }
}

pipeline/rnaseq.config

@@ -11,5 +11,6 @@
 profiles {
     eddie { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/eddie.config" }
     mpcdf { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/mpcdf.config" }
+    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/sbc_sharc.config" }
     utd_sysbio { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/utd_sysbio.config" }
 }

pipeline/sarek.config

@@ -15,5 +15,6 @@ profiles {
     eddie { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/eddie.config" }
     icr_davros { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/icr_davros.config" }
     munin { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/munin.config" }
+    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/sbc_sharc.config" }
     uppmax { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/uppmax.config" }
 }