diff --git a/README.md b/README.md
index 978e606..d86461c 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,6 @@ A repository for hosting Nextflow configuration files containing custom paramete
 - [Configuration and parameters](#configuration-and-parameters)
 - [Offline usage](#offline-usage)
 - [Adding a new config](#adding-a-new-config)
-  - [Checking user hostnames](#checking-user-hostnames)
   - [Testing](#testing)
   - [Documentation](#documentation)
   - [Uploading to `nf-core/configs`](#uploading-to-nf-coreconfigs)
diff --git a/conf/sage.config b/conf/sage.config
index 3b2a423..e5bfa8b 100644
--- a/conf/sage.config
+++ b/conf/sage.config
@@ -64,3 +64,37 @@ params {
 def slow(attempt, factor = 2) {
     return Math.ceil( attempt / factor) as int
 }
+
+
+// Function to ensure that resource requirements don't go
+// beyond a maximum limit (copied here for Sarek v2)
+def check_max(obj, type) {
+    if (type == 'memory') {
+        try {
+            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
+                return params.max_memory as nextflow.util.MemoryUnit
+            else
+                return obj
+        } catch (all) {
+            println "   ### ERROR ###   Max memory '${params.max_memory}' is not valid! Using default value: $obj"
+            return obj
+        }
+    } else if (type == 'time') {
+        try {
+            if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
+                return params.max_time as nextflow.util.Duration
+            else
+                return obj
+        } catch (all) {
+            println "   ### ERROR ###   Max time '${params.max_time}' is not valid! Using default value: $obj"
+            return obj
+        }
+    } else if (type == 'cpus') {
+        try {
+            return Math.min( obj, params.max_cpus as int )
+        } catch (all) {
+            println "   ### ERROR ###   Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
+            return obj
+        }
+    }
+}
diff --git a/docs/sage.md b/docs/sage.md
index 755e0c2..133ccec 100644
--- a/docs/sage.md
+++ b/docs/sage.md
@@ -1,12 +1,15 @@
-# nf-core/configs: Sage Bionetworks Configuration
+# nf-core/configs: Sage Bionetworks Global Configuration
 
-To use this custom configuration, run the pipeline with `-profile sage`. This will download and launch the [`sage.config`](../conf/sage.config), which contains a number of optimizations relevant to Sage employees running workflows on AWS (_e.g._ using Nextflow Tower). These include:
+To use this custom configuration, run the pipeline with `-profile sage`. This will download and load the [`sage.config`](../conf/sage.config), which contains a number of optimizations relevant to Sage employees running workflows on AWS (_e.g._ using Nextflow Tower). This profile will also load any applicable pipeline-specific configuration.
 
-- Updating the default value for `igenomes_base` to `s3://sage-igenomes`
-- Increasing the default time limits because we run pipelines on AWS
-- Enabling retries by default when exit codes relate to insufficient memory
+This global configuration includes the following tweaks:
+
+- Update the default value for `igenomes_base` to `s3://sage-igenomes`
+- Increase the default time limits because we run pipelines on AWS
+- Enable retries by default when exit codes relate to insufficient memory
 - Allow pending jobs to finish if the number of retries are exhausted
-- Slowing the increase in the number of allocated CPU cores on retries
+- Slow the increase in the number of allocated CPU cores on retries
+- Define the `check_max()` function, which is missing in Sarek v2
 
 ## Additional information about iGenomes
 
diff --git a/pipeline/sarek.config b/pipeline/sarek.config
index 57d7bdf..512541e 100644
--- a/pipeline/sarek.config
+++ b/pipeline/sarek.config
@@ -15,4 +15,4 @@ profiles {
   cfc { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/cfc.config" }
   cfc_dev { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/cfc.config" }
   eddie { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/eddie.config" }
-}
\ No newline at end of file
+}