Mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-22 08:29:54 +00:00)
Merge pull request #414 from sheffield-bioinformatics-core/sbc_sharc

Added global institutional and pipeline configs for sbc_sharc

Commit 3fcf708134
17 changed files with 520 additions and 0 deletions
.github/workflows/main.yml (vendored): 1 addition

@@ -83,6 +83,7 @@ jobs:
       - "sage"
       - "sahmri"
       - "sanger"
+      - "sbc_sharc"
       - "seg_globe"
       - "uct_hpc"
       - "unibe_ibu"
README.md: 8 additions

@@ -137,6 +137,7 @@ Currently documentation is available for the following systems:
 - [ROSALIND](docs/rosalind.md)
 - [SAGE BIONETWORKS](docs/sage.md)
 - [SANGER](docs/sanger.md)
+- [SBC_SHARC](docs/sbc_sharc.md)
 - [SEG_GLOBE](docs/seg_globe.md)
 - [UCT_HPC](docs/uct_hpc.md)
 - [UNIBE_IBU](docs/unibe_ibu.md)

@@ -197,6 +198,10 @@ Currently documentation is available for the following pipelines within specific
 - ampliseq
   - [BINAC](docs/pipeline/ampliseq/binac.md)
   - [UPPMAX](docs/pipeline/ampliseq/uppmax.md)
+- atacseq
+  - [SBC_SHARC](docs/pipeline/atacseq/sbc_sharc.md)
+- chipseq
+  - [SBC_SHARC](docs/pipeline/chipseq/sbc_sharc.md)
 - eager
   - [EVA](docs/pipeline/eager/eva.md)
 - mag

@@ -204,11 +209,14 @@ Currently documentation is available for the following pipelines within specific
 - rnafusion
   - [HASTA](docs/pipeline/rnafusion/hasta.md)
   - [MUNIN](docs/pipeline/rnafusion/munin.md)
+- rnaseq
+  - [SBC_SHARC](docs/pipeline/rnaseq/sbc_sharc.md)
 - rnavar
   - [MUNIN](docs/pipeline/rnavar/munin.md)
 - sarek
   - [Cancer Research UK Manchester Institute](docs/pipeline/sarek/crukmi.md)
   - [MUNIN](docs/pipeline/sarek/munin.md)
+  - [SBC_SHARC](docs/pipeline/sarek/sbc_sharc.md)
   - [UPPMAX](docs/pipeline/sarek/uppmax.md)
 - taxprofiler
   - [EVA](docs/pipeline/taxprofiler/eva.md)
conf/pipeline/atacseq/sbc_sharc.config: new file, 74 additions

@@ -0,0 +1,74 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/atacseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/


// process-specific resource requirements - reduced specification from those in atacseq/conf/base.config

process {

    withLabel:process_low {
        cpus   = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h * task.attempt, 'time' ) }
    }

    withLabel:process_medium {
        cpus   = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time   = { check_max( 6.h * task.attempt, 'time' ) }
    }

    withLabel:process_high {
        cpus   = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h * task.attempt, 'time' ) }
    }

    withLabel:process_long {
        time   = { check_max( 12.h * task.attempt, 'time' ) }
    }

}


// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit

def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}
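For illustration, here is a minimal Groovy sketch of how the `cpus` branch of `check_max()` behaves for a retried `process_high` task, assuming the `max_cpus = 16` limit set later in this commit by `conf/sbc_sharc.config`:

```groovy
// Sketch only: plain-Groovy stand-in for the 'cpus' branch of check_max(),
// showing how escalating retry requests are capped at params.max_cpus (16 on ShARC).
def maxCpus = 16                                // params.max_cpus in conf/sbc_sharc.config
def requested = { attempt -> 8 * attempt }      // withLabel:process_high cpus request

(1..3).each { attempt ->
    def granted = Math.min(requested(attempt), maxCpus)
    println "attempt ${attempt}: requested ${requested(attempt)} cpus, granted ${granted} cpus"
}
// attempt 1: requested 8 cpus, granted 8 cpus
// attempt 2: requested 16 cpus, granted 16 cpus
// attempt 3: requested 24 cpus, granted 16 cpus (capped)
```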
conf/pipeline/chipseq/sbc_sharc.config: new file, 74 additions

@@ -0,0 +1,74 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/chipseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/


// process-specific resource requirements - reduced specification from those in chipseq/conf/base.config

process {

    withLabel:process_low {
        cpus   = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h * task.attempt, 'time' ) }
    }

    withLabel:process_medium {
        cpus   = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time   = { check_max( 6.h * task.attempt, 'time' ) }
    }

    withLabel:process_high {
        cpus   = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h * task.attempt, 'time' ) }
    }

    withLabel:process_long {
        time   = { check_max( 12.h * task.attempt, 'time' ) }
    }

}


// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit

def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}
conf/pipeline/rnaseq/sbc_sharc.config: new file, 79 additions

@@ -0,0 +1,79 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/rnaseq
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/


// process-specific resource requirements - reduced specification from those in rnaseq/conf/base.config

process {

    withLabel:process_low {
        cpus   = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h * task.attempt, 'time' ) }
    }

    withLabel:process_medium {
        cpus   = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time   = { check_max( 6.h * task.attempt, 'time' ) }
    }

    withLabel:process_high {
        cpus   = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h * task.attempt, 'time' ) }
    }

    withLabel:process_long {
        time   = { check_max( 12.h * task.attempt, 'time' ) }
    }

    withLabel:process_high_memory {
        memory = { check_max( 60.GB * task.attempt, 'memory' ) }
    }

}


// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit

def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}
conf/pipeline/sarek/sbc_sharc.config: new file, 114 additions

@@ -0,0 +1,114 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Custom Pipeline Resource Config for nf-core/sarek
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/


// process-specific resource requirements - reduced specification from those in sarek/conf/base.config

process {

    // process labels

    withLabel:process_low {
        cpus   = { check_max( 2 * task.attempt, 'cpus' ) }
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h * task.attempt, 'time' ) }
    }

    withLabel:process_medium {
        cpus   = { check_max( 4 * task.attempt, 'cpus' ) }
        memory = { check_max( 8.GB * task.attempt, 'memory' ) }
        time   = { check_max( 6.h * task.attempt, 'time' ) }
    }

    withLabel:process_high {
        cpus   = { check_max( 8 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h * task.attempt, 'time' ) }
    }

    withLabel:process_long {
        time   = { check_max( 12.h * task.attempt, 'time' ) }
    }

    withLabel:process_high_memory {
        memory = { check_max( 60.GB * task.attempt, 'memory' ) }
    }


    // process name

    withName:'BWAMEM1_MEM|BWAMEM2_MEM' {
        cpus   = { check_max( 12 * task.attempt, 'cpus' ) }
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h * task.attempt, 'time' ) }
    }

    withName:'FASTP' {
        cpus = { check_max( 12 * task.attempt, 'cpus' ) }
    }

    withName:'FASTQC|FASTP|MOSDEPTH|SAMTOOLS_CONVERT' {
        memory = { check_max( 4.GB * task.attempt, 'memory' ) }
    }

    withName:'GATK4_APPLYBQSR|GATK4_APPLYBQSR_SPARK|GATK4_BASERECALIBRATOR|SAMTOOLS_STATS' {
        cpus = { check_max( 4 * task.attempt, 'cpus' ) }
    }

    withName:'GATK4_APPLYBQSR|GATK4_APPLYBQSR_SPARK|GATK4_BASERECALIBRATOR|GATK4_GATHERBQSRREPORTS' {
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
    }

    withName:'GATK4_MARKDUPLICATES' {
        memory = { check_max( 16.GB * task.attempt, 'memory' ) }
    }

    withName:'FREEBAYES|SAMTOOLS_STATS|SAMTOOLS_INDEX|UNZIP' {
        cpus = { check_max( 1 * task.attempt, 'cpus' ) }
    }

}


// function 'check_max()' to ensure that resource requirements don't go beyond maximum limit

def check_max(obj, type) {
    if (type == 'memory') {
        try {
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min(obj, params.max_cpus as int)
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}
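To see how the sarek-specific `withName` overrides interact with the institutional limits (in Nextflow, a matching `withName` selector takes precedence over `withLabel`), here is a minimal Groovy sketch of the effective `BWAMEM1_MEM`/`BWAMEM2_MEM` request across retries, assuming `max_cpus = 16` and `max_memory = 64.GB` from `conf/sbc_sharc.config`:

```groovy
// Sketch only: effective BWAMEM1_MEM/BWAMEM2_MEM request per attempt under this profile,
// with cpus capped at max_cpus = 16 and memory capped at max_memory = 64 GB by check_max().
def maxCpus = 16
def maxMemGb = 64

(1..3).each { attempt ->
    def cpus  = Math.min(12 * attempt, maxCpus)     // check_max( 12 * task.attempt, 'cpus' )
    def memGb = Math.min(16 * attempt, maxMemGb)    // check_max( 16.GB * task.attempt, 'memory' )
    println "attempt ${attempt}: ${cpus} cpus, ${memGb} GB"
}
// attempt 1: 12 cpus, 16 GB
// attempt 2: 16 cpus, 32 GB  (cpus capped)
// attempt 3: 16 cpus, 48 GB  (cpus capped)
```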
conf/sbc_sharc.config: new file, 57 additions

@@ -0,0 +1,57 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Sheffield Bioinformatics Core Configuration Profile - ShARC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Base Institutional Configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/


params {

    // nf-core specific parameters displayed in header summary of each run

    config_profile_description = 'Sheffield Bioinformatics Core - ShARC'
    config_profile_contact     = 'Lewis Quayle (l.quayle@sheffield.ac.uk)'
    config_profile_url         = 'https://docs.hpc.shef.ac.uk/en/latest/sharc/index.html'

    // hpc resource limits

    max_cpus   = 16
    max_memory = 64.GB
    max_time   = 96.h

}


// container engine

singularity {

    enabled    = true
    autoMounts = true

}


// hpc configuration specific to ShARC

process {

    // scheduler

    executor       = 'sge'
    penv           = 'smp'
    queue          = { task.time <= 6.h ? 'shortint.q' : 'all.q' }
    clusterOptions = { "-l rmem=${task.memory.toGiga()}G" }

    // error and retry handling

    errorStrategy = { task.exitStatus in [143,137,104,134,139,140] ? 'retry' : 'finish' }
    maxRetries    = 2

}
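As a quick check of how the scheduler directives above resolve, here is a minimal Groovy sketch using plain-number stand-ins for `task.time` and `task.memory`, with the same thresholds as the config:

```groovy
// Sketch only: how the queue and clusterOptions closures above resolve for typical tasks.
def pickQueue  = { timeHours -> timeHours <= 6 ? 'shortint.q' : 'all.q' }
def rmemOption = { memGb -> "-l rmem=${memGb}G" }

println pickQueue(6)      // shortint.q  (e.g. process_medium: 6.h on the first attempt)
println pickQueue(8)      // all.q       (e.g. process_high: 8.h on the first attempt)
println rmemOption(16)    // -l rmem=16G (e.g. process_high: 16.GB on the first attempt)
```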
docs/pipeline/atacseq/sbc_sharc.md: new file, 11 additions

# nf-core/configs: ATAC-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/atacseq](https://nf-co.re/atacseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the atacseq-specific [`sbc_sharc.config`](../../../conf/pipeline/atacseq/sbc_sharc.config), which has been pre-configured with a setup suitable for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html); the institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/atacseq -profile sbc_sharc`
docs/pipeline/chipseq/sbc_sharc.md: new file, 11 additions

# nf-core/configs: ChIP-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/chipseq](https://nf-co.re/chipseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the chipseq-specific [`sbc_sharc.config`](../../../conf/pipeline/chipseq/sbc_sharc.config), which has been pre-configured with a setup suitable for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html); the institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/chipseq -profile sbc_sharc`
docs/pipeline/rnaseq/sbc_sharc.md: new file, 11 additions

# nf-core/configs: RNA-Seq Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/rnaseq](https://nf-co.re/rnaseq) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the rnaseq-specific [`sbc_sharc.config`](../../../conf/pipeline/rnaseq/sbc_sharc.config), which has been pre-configured with a setup suitable for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html); the institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/rnaseq -profile sbc_sharc`
docs/pipeline/sarek/sbc_sharc.md: new file, 11 additions

# nf-core/configs: Sarek Specific Configuration - Sheffield Bioinformatics Core Facility ShARC

Specific configuration for the [nf-co.re/sarek](https://nf-co.re/sarek) pipeline.

## Usage

To use, run Nextflow with the pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch the sarek-specific [`sbc_sharc.config`](../../../conf/pipeline/sarek/sbc_sharc.config), which has been pre-configured with a setup suitable for the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html); the institutional `sbc_sharc` profile loads this pipeline-specific configuration file automatically.

Example: `nextflow run nf-core/sarek -profile sbc_sharc`
docs/sbc_sharc.md: new file, 40 additions

# nf-core/configs: Sheffield Bioinformatics Core Facility ShARC Configuration

## Using the SBC_ShARC Institutional Configuration Profile

To use [`sbc_sharc.config`](../conf/sbc_sharc.config), run Nextflow with an nf-core pipeline using `-profile sbc_sharc` (note the single hyphen).

This will download and launch [`sbc_sharc.config`](../conf/sbc_sharc.config), which has been pre-configured with a setup suitable for the ShARC cluster and will automatically load the appropriate pipeline-specific configuration file.

The following nf-core pipelines have been successfully configured for use on the [University of Sheffield ShARC cluster](https://docs.hpc.shef.ac.uk/en/latest/index.html):

- [nf-co.re/atacseq](https://nf-co.re/atacseq)
- [nf-co.re/chipseq](https://nf-co.re/chipseq)
- [nf-co.re/rnaseq](https://nf-co.re/rnaseq)
- [nf-co.re/sarek](https://nf-co.re/sarek)

When using [`sbc_sharc.config`](../conf/sbc_sharc.config) with the pipelines listed above, the appropriate configuration file from the list below will be loaded automatically:

- [atacseq sbc_sharc.config](../conf/pipeline/atacseq/sbc_sharc.config)
- [chipseq sbc_sharc.config](../conf/pipeline/chipseq/sbc_sharc.config)
- [rnaseq sbc_sharc.config](../conf/pipeline/rnaseq/sbc_sharc.config)
- [sarek sbc_sharc.config](../conf/pipeline/sarek/sbc_sharc.config)

The [`sbc_sharc.config`](../conf/sbc_sharc.config) configuration file might work with other nf-core pipelines as it stands, but we cannot guarantee they will run without issue. We will continue to create, test and optimise configurations for new pipelines in the future.

## A Note on Singularity Containers

The [`sbc_sharc.config`](../conf/sbc_sharc.config) configuration file supports running nf-core pipelines with Singularity containers; Singularity images will be downloaded automatically before execution of the pipeline.

When you run Nextflow for the first time, Singularity will create a hidden directory `.singularity` in your `$HOME` directory `/home/$USER`, which has very limited space available (10 GB). It is therefore a good idea to create a directory somewhere else with more room (e.g. `/data/$USER`) and link the two locations. To do this, run the following series of commands:

```shell
# change directory to $HOME
cd $HOME

# make the directory that will be linked to
mkdir /data/$USER/.singularity

# link the new directory with the existing one
ln -s /data/$USER/.singularity .singularity
```
nfcore_custom.config: 1 addition

@@ -65,6 +65,7 @@ profiles {
     sage       { includeConfig "${params.custom_config_base}/conf/sage.config" }
     sahmri     { includeConfig "${params.custom_config_base}/conf/sahmri.config" }
     sanger     { includeConfig "${params.custom_config_base}/conf/sanger.config" }
+    sbc_sharc  { includeConfig "${params.custom_config_base}/conf/sbc_sharc.config" }
     seg_globe  { includeConfig "${params.custom_config_base}/conf/seg_globe.config" }
     uct_hpc    { includeConfig "${params.custom_config_base}/conf/uct_hpc.config" }
     unibe_ibu  { includeConfig "${params.custom_config_base}/conf/unibe_ibu.config" }
pipeline/atacseq.config: new file, 13 additions

@@ -0,0 +1,13 @@
/*
 * -------------------------------------------------
 * nfcore/atacseq custom profile Nextflow config file
 * -------------------------------------------------
 * Config options for custom environments.
 * Cluster-specific config options should be saved
 * in the conf/pipeline/atacseq folder and imported
 * under a profile name here.
 */

profiles {
    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/atacseq/sbc_sharc.config" }
}
pipeline/chipseq.config: new file, 13 additions

@@ -0,0 +1,13 @@
/*
 * -------------------------------------------------
 * nfcore/chipseq custom profile Nextflow config file
 * -------------------------------------------------
 * Config options for custom environments.
 * Cluster-specific config options should be saved
 * in the conf/pipeline/chipseq folder and imported
 * under a profile name here.
 */

profiles {
    sbc_sharc { includeConfig "${params.custom_config_base}/conf/pipeline/chipseq/sbc_sharc.config" }
}
pipeline/rnaseq.config: 1 addition

@@ -11,5 +11,6 @@
 profiles {
     eddie      { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/eddie.config" }
     mpcdf      { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/mpcdf.config" }
+    sbc_sharc  { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/sbc_sharc.config" }
     utd_sysbio { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/utd_sysbio.config" }
 }
pipeline/sarek.config: 1 addition

@@ -15,5 +15,6 @@ profiles {
     eddie      { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/eddie.config" }
     icr_davros { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/icr_davros.config" }
     munin      { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/munin.config" }
+    sbc_sharc  { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/sbc_sharc.config" }
     uppmax     { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/uppmax.config" }
 }