Mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-24 09:09:56 +00:00)

Merge branch 'eva-java-correction' of github.com:jfy133/nf-core-configs into eva-java-correction

Commit 76b0d1dd85: 8 changed files with 135 additions and 14 deletions

.github/workflows/fix-linting.yml (vendored, new file, 55 lines)

@@ -0,0 +1,55 @@
name: Fix linting from a comment
on:
  issue_comment:
    types: [created]

jobs:
  deploy:
    # Only run if comment is on a PR with the main repo, and if it contains the magic keywords
    if: >
      contains(github.event.comment.html_url, '/pull/') &&
      contains(github.event.comment.body, '@nf-core-bot fix linting') &&
      github.repository == 'nf-core/configs'
    runs-on: ubuntu-latest
    steps:
      # Use the @nf-core-bot token to check out so we can push later
      - uses: actions/checkout@v3
        with:
          token: ${{ secrets.nf_core_bot_auth_token }}

      # Action runs on the issue comment, so we don't get the PR by default
      # Use the gh cli to check out the PR
      - name: Checkout Pull Request
        run: gh pr checkout ${{ github.event.issue.number }}
        env:
          GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}

      - uses: actions/setup-node@v2

      - name: Install Prettier
        run: npm install -g prettier @prettier/plugin-php

      # Check that we actually need to fix something
      - name: Run 'prettier --check'
        id: prettier_status
        run: |
          if prettier --check ${GITHUB_WORKSPACE}; then
            echo "::set-output name=result::pass"
          else
            echo "::set-output name=result::fail"
          fi

      - name: Run 'prettier --write'
        if: steps.prettier_status.outputs.result == 'fail'
        run: prettier --write ${GITHUB_WORKSPACE}

      - name: Commit & push changes
        if: steps.prettier_status.outputs.result == 'fail'
        run: |
          git config user.email "core@nf-co.re"
          git config user.name "nf-core-bot"
          git config push.default upstream
          git add .
          git status
          git commit -m "[automated] Fix linting with Prettier"
          git push

.github/workflows/main.yml (vendored, 1 addition)

@@ -66,6 +66,7 @@ jobs:
       - "jax"
       - "lugh"
       - "marvin"
+      - "mjolnir_globe"
       - "maestro"
       - "mpcdf"
       - "munin"

@@ -121,6 +121,7 @@ Currently documentation is available for the following systems:
 - [LUGH](docs/lugh.md)
 - [MAESTRO](docs/maestro.md)
 - [MARVIN](docs/marvin.md)
+- [MJOLNIR_GLOBE](docs/mjolnir_globe.md)
 - [MPCDF](docs/mpcdf.md)
 - [MUNIN](docs/munin.md)
 - [NU_GENOMICS](docs/nu_genomics.md)

conf/mjolnir_globe.config (new file, 25 lines)

@@ -0,0 +1,25 @@
//Profile config names for nf-core/configs
params {
    config_profile_description = 'Section for Hologenomics and Section for Molecular Ecology and Evolution @ Globe Institute, University of Copenhagen - mjolnir_globe profile provided by nf-core/configs.'
    config_profile_contact = 'Aashild Vaagene (@ashildv)'
    config_profile_url = 'https://globe.ku.dk/research/'
    max_memory = 500.GB
    max_cpus = 50
    max_time = 720.h
}

singularity {
    enabled = true
    autoMounts = true
    cacheDir = '/maps/projects/mjolnir1/data/cache/nf-core/singularity'
}

process {
    executor = 'slurm'
}

cleanup = true

executor {
    queueSize = 10
}

@@ -196,10 +196,14 @@ process {
     withName:get_software_versions {
         cache = false
         clusterOptions = { "-S /bin/bash -V -l h=!(bionode06)" }
-        clusterOptions = { "-S /bin/bash -V -l h_vmem=${(task.memory.toMega() * 4)}M" }
+        clusterOptions = { "-S /bin/bash -V -l h_vmem=${(task.memory.toMega() * 8)}M" }
         errorStrategy = { task.exitStatus in [1,143,137,104,134,139,140] ? 'retry' : 'finish' }
     }
+
+    withName:multiqc {
+        clusterOptions = { "-S /bin/bash -V -j y -o output.log -l h_vmem=${task.memory.toGiga() * 2}G" }
+    }
 }

 profiles {

@@ -1,8 +1,11 @@
+// Define the Scratch directory
+def scratch_dir = System.getenv("VSC_SCRATCH_VO_USER") ?: "scratch/"
+
 // Specify the work directory
-workDir = "$VSC_SCRATCH_VO_USER/work"
+workDir = "$scratch_dir/work"

 // Perform work directory cleanup when the run has succesfully completed
-cleanup = true
+// cleanup = true

 // Reduce the job submit rate to about 10 per second, this way the server won't be bombarded with jobs
 executor {

@@ -13,7 +16,7 @@ executor {
 singularity {
     enabled = true
     autoMounts = true
-    cacheDir = "$VSC_SCRATCH_VO_USER/singularity"
+    cacheDir = "$scratch_dir/singularity"
 }

 // Define profiles for each cluster

@@ -32,8 +35,8 @@ profiles {
             executor = 'slurm'
             queue = 'skitty'
             maxRetries = 2
-            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
-            scratch = "$VSC_SCRATCH_VO_USER"
+            beforeScript = "export SINGULARITY_CACHEDIR=$scratch_dir/.singularity"
+            scratch = "$scratch_dir"
         }
     }

@@ -51,8 +54,8 @@ profiles {
             executor = 'slurm'
             queue = 'swalot'
             maxRetries = 2
-            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
-            scratch = "$VSC_SCRATCH_VO_USER"
+            beforeScript = "export SINGULARITY_CACHEDIR=$scratch_dir/.singularity"
+            scratch = "$scratch_dir"
         }
     }

@@ -70,8 +73,8 @@ profiles {
             executor = 'slurm'
             queue = 'victini'
             maxRetries = 2
-            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
-            scratch = "$VSC_SCRATCH_VO_USER"
+            beforeScript = "export SINGULARITY_CACHEDIR=$scratch_dir/.singularity"
+            scratch = "$scratch_dir"
         }
     }

@@ -89,8 +92,8 @@ profiles {
             executor = 'slurm'
             queue = 'kirlia'
             maxRetries = 2
-            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
-            scratch = "$VSC_SCRATCH_VO_USER"
+            beforeScript = "export SINGULARITY_CACHEDIR=$scratch_dir/.singularity"
+            scratch = "$scratch_dir"
         }
     }

@@ -108,8 +111,8 @@ profiles {
             executor = 'slurm'
             queue = 'doduo'
             maxRetries = 2
-            beforeScript = "export SINGULARITY_CACHEDIR=$VSC_SCRATCH_VO_USER/.singularity"
-            scratch = "$VSC_SCRATCH_VO_USER"
+            beforeScript = "export SINGULARITY_CACHEDIR=$scratch_dir/.singularity"
+            scratch = "$scratch_dir"
         }
     }
 }
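
The hunks above replace direct `$VSC_SCRATCH_VO_USER` references with a `scratch_dir` variable that falls back to a relative `scratch/` directory when the environment variable is unset. A rough pre-flight check before launching (illustrative only) could be:

```bash
# Warn if the VO scratch variable is missing; Nextflow will then fall back
# to a relative "scratch/" directory next to wherever the run is launched.
if [ -z "${VSC_SCRATCH_VO_USER:-}" ]; then
    echo "WARNING: VSC_SCRATCH_VO_USER is not set; work files will go to ./scratch/" >&2
fi
```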

docs/mjolnir_globe.md (new file, 31 lines)

@@ -0,0 +1,31 @@
# nf-core/configs: Section for Hologenomics at GLOBE, University of Copenhagen (Mjolnir server) Configuration

> **NB:** You will need an account on Mjolnir to run the pipeline. If in doubt contact IT.

Prior to running the pipeline for the first time with [`mjolnir_globe.config`](../conf/mjolnir_globe.config), users **must** create a hidden directory called `.tmp_nfcore` in their data/project directory on Mjolnir, where temp files from nf-core pipelines will be redirected by the `NXF_TEMP` environment variable (see below).

The contents of the `.tmp_nfcore` directory should be deleted periodically to save space.
If `NXF_TEMP` is not set to redirect temp files, the `/tmp` directory on the compute nodes is used and quickly fills up, which blocks anyone from working on those nodes until the offending user removes their files.
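
Creating that directory is a single command; the path below is the one assumed by the `NXF_TEMP` export further down, so adjust it if your project directory differs:

```bash
# Hidden directory that will receive nf-core temp files via NXF_TEMP
mkdir -p /maps/projects/mjolnir1/people/$USER/.tmp_nfcore
```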

The following lines **must** be added to your `~/.bash_profile`:

```bash
# Redirect tmp files away from the /tmp directories on the compute nodes and the head node
export NXF_TEMP=/maps/projects/mjolnir1/people/$USER/.tmp_nfcore

# Nextflow: limit the memory of the Java virtual machine
export NXF_OPTS='-Xms1g -Xmx4g'
```

Once you have created the `.tmp_nfcore` directory and added the lines above to your `~/.bash_profile`, you can run an nf-core pipeline.

Before running a pipeline you will need to load Java, Miniconda, Singularity and Nextflow. You can do this by including the commands below in your SLURM/sbatch script:

```bash
## Load Java and Nextflow environment modules
module purge
module load jdk/1.8.0_291 miniconda singularity/3.8.0 nextflow/21.04.1.5556
```
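
A minimal sbatch wrapper might then look like the sketch below; the `#SBATCH` resources and the `<pipeline>`/`<release>` placeholders are illustrative, and only the module line is taken from above:

```bash
#!/bin/bash
#SBATCH --job-name=nfcore_run       # placeholder job name
#SBATCH --cpus-per-task=1           # resources for the Nextflow head job only
#SBATCH --mem=4G
#SBATCH --time=24:00:00

## Load Java and Nextflow environment modules
module purge
module load jdk/1.8.0_291 miniconda singularity/3.8.0 nextflow/21.04.1.5556

# Replace <pipeline> and <release> with the nf-core pipeline and version you want to run
nextflow run nf-core/<pipeline> -r <release> -profile mjolnir_globe
```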

All of the intermediate output files required to run the pipeline are stored in the `work/` directory. It is recommended to delete this directory after the pipeline has finished successfully, because it can get quite large and all of the main output files are saved in the `results/` directory anyway.
The `mjolnir_globe` config contains a `cleanup` directive that removes the `work/` directory automatically once the pipeline has completed successfully. If the run does not complete successfully, the `work/` directory should be removed manually to save storage space.
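
After a failed run, that manual cleanup from the launch directory is simply the following (illustrative; double-check you are in the right directory first):

```bash
# Remove intermediate files left behind by an unsuccessful run
rm -rf work/
```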

@@ -49,6 +49,7 @@ profiles {
   lugh { includeConfig "${params.custom_config_base}/conf/lugh.config" }
   maestro { includeConfig "${params.custom_config_base}/conf/maestro.config" }
   marvin { includeConfig "${params.custom_config_base}/conf/marvin.config" }
+  mjolnir_globe { includeConfig "${params.custom_config_base}/conf/mjolnir_globe.config" }
   mpcdf { includeConfig "${params.custom_config_base}/conf/mpcdf.config" }
   munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
   nihbiowulf { includeConfig "${params.custom_config_base}/conf/nihbiowulf.config" }
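
With the new profile registered above, one way to sanity-check that it resolves (assuming a local Nextflow installation; `<pipeline>` is a placeholder for any nf-core pipeline) is:

```bash
# Print the fully resolved configuration for the mjolnir_globe profile
nextflow config nf-core/<pipeline> -profile mjolnir_globe
```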