mirror of https://github.com/MillironX/nf-configs.git
commit 0da2c57784
12 changed files with 105 additions and 58 deletions
.github/workflows/main.yml (vendored, 43 changed lines)

@@ -16,7 +16,48 @@ jobs:
    needs: test_all_profiles
    strategy:
      matrix:
-       profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'biohpc_gen', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'eva', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'kraken', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'sanger', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'utd_sysbio', 'uzh']
+       profile:
+         - 'abims'
+         - 'awsbatch'
+         - 'aws_tower'
+         - 'bi'
+         - 'bigpurple'
+         - 'binac'
+         - 'biohpc_gen'
+         - 'cbe'
+         - 'ccga_dx'
+         - 'ccga_med'
+         - 'cfc'
+         - 'cfc_dev'
+         - 'crick'
+         - 'denbi_qbic'
+         - 'ebc'
+         - 'eddie'
+         - 'eva'
+         - 'genotoul'
+         - 'genouest'
+         - 'gis'
+         - 'google'
+         - 'hebbe'
+         - 'icr_davros'
+         - 'ifb_core'
+         - 'imperial'
+         - 'imperial_mb'
+         - 'jax'
+         - 'mpcdf'
+         - 'munin'
+         - 'oist'
+         - 'pasteur'
+         - 'phoenix'
+         - 'prince'
+         - 'sanger'
+         - 'seg_globe'
+         - 'shh'
+         - 'uct_hpc'
+         - 'uppmax'
+         - 'utd_ganymede'
+         - 'utd_sysbio'
+         - 'uzh'
    steps:
      - uses: actions/checkout@v1
      - name: Install Nextflow
README.md

@@ -96,6 +96,7 @@ Currently documentation is available for the following systems:
* [ABIMS](docs/abims.md)
* [AWSBATCH](docs/awsbatch.md)
+ * [AWS_TOWER](docs/aws_tower.md)
* [BIGPURPLE](docs/bigpurple.md)
* [BI](docs/bi.md)
* [BINAC](docs/binac.md)

@@ -117,7 +118,6 @@ Currently documentation is available for the following systems:
* [HEBBE](docs/hebbe.md)
* [ICR_DAVROS](docs/icr_davros.md)
* [JAX](docs/jax.md)
- * [KRAKEN](docs/kraken.md)
* [MPCDF](docs/mpcdf.md)
* [MUNIN](docs/munin.md)
* [OIST](docs/oist.md)
@@ -10,6 +10,7 @@ import os
import sys
import argparse
import re
+import yaml

############################################
############################################

@@ -51,19 +52,22 @@ def check_config(Config, Github):
    ### Ignore these profiles
    ignore_me = ['czbiohub_aws']
    tests.update(ignore_me)
-    with open(Github, 'r') as ghfile:
-        for line in ghfile:
-            if re.search('profile: ', line):
-                line = line.replace('\'','').replace('[','').replace(']','').replace('\n','')
-                profiles = line.split(':')[1].split(',')
-                for p in profiles:
-                    tests.add(p.strip())
+    # parse yaml GitHub actions file
+    try:
+        with open(Github, 'r') as ghfile:
+            wf = yaml.safe_load(ghfile)
+            profile_list = wf["jobs"]["profile_test"]["strategy"]["matrix"]["profile"]
+    except Exception as e:
+        print("Could not parse yaml file: {}, {}".format(Github, e))
+        sys.exit(1)
+    # Add profiles to test
+    for profile in profile_list:
+        tests.add(profile.strip())

    ###Check if sets are equal
-    if tests == config_profiles:
-        sys.exit(0)
-    else:
-        #Maybe report what is missing here too
+    try:
+        assert tests == config_profiles
+    except (AssertionError):
        print("Tests don't seem to test these profiles properly. Please check whether you added the profile to the Github Actions testing YAML.\n")
        print(config_profiles.symmetric_difference(tests))
        sys.exit(1)
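The change above replaces regex-scraping of the workflow file with PyYAML parsing. As a minimal sketch of the new lookup (the cut-down workflow text below is an illustrative assumption, not the repository's actual main.yml):

import yaml

# Illustrative stand-in for the GitHub Actions workflow (assumed structure for this example only)
workflow_text = """
jobs:
  profile_test:
    strategy:
      matrix:
        profile:
          - 'abims'
          - 'aws_tower'
"""

wf = yaml.safe_load(workflow_text)
# Same nested lookup the updated checker script performs
profiles = wf["jobs"]["profile_test"]["strategy"]["matrix"]["profile"]
print(profiles)  # ['abims', 'aws_tower']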
conf/aws_tower.config (new file, 21 lines)

@@ -0,0 +1,21 @@
+//Nextflow config file for running on AWS batch
+params {
+    config_profile_description = 'AWS Batch with Tower Profile'
+    config_profile_contact = 'Gisela Gabernet (@ggabernet)'
+    config_profile_url = 'https://aws.amazon.com/batch/'
+}
+
+timeline {
+    overwrite = true
+}
+report {
+    overwrite = true
+}
+trace {
+    overwrite = true
+}
+dag {
+    overwrite = true
+}
+
+process.executor = 'awsbatch'
@@ -12,7 +12,6 @@ singularity {

process {
    executor = 'slurm'
    beforeScript = 'module load devel/singularity/3.4.2'
    queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
    scratch = 'true'
}
@@ -10,7 +10,6 @@ singularity {
}

process {
    beforeScript = 'module load devel/singularity/3.4.2'
    executor = 'slurm'
    queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
    scratch = 'true'
conf/kraken.config (deleted file, 24 lines)

@@ -1,24 +0,0 @@
-//Profile config names for nf-core/configs
-params {
-    config_profile_name = 'KRAKEN'
-    config_profile_description = 'Jenkins cluster provided by nf-core/configs.'
-    config_profile_contact = 'Maxime Garcia or Johannes Alneberg'
-    config_profile_url = 'kraken.dyn.scilifelab.se'
-}
-
-process {
-    executor = 'local'
-}
-
-docker {
-    enabled = true
-    mountFlags = 'z'
-    fixOwnership = true
-}
-
-params {
-    max_memory = 60.GB
-    max_cpus = 16
-    max_time = 72.h
-    igenomes_base = '/share/igenomes/'
-}
@@ -69,7 +69,7 @@ process {
    }

    withName: adapter_removal {
-        clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
+        clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 4)}G,virtual_free=${(task.memory.toGiga() * 4)}G" }
    }

    withName: dedup {

@@ -97,7 +97,7 @@ process {
    }

    withName: mtnucratio {
-        clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
+        clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 4)}G,virtual_free=${(task.memory.toGiga() * 4)}G" }
    }

    withName: vcf2genome {
@@ -20,21 +20,33 @@ params {
    primer_sets {
        artic {
            '1' {
-                fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V1/nCoV-2019.reference.fasta'
+                fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V1/nCoV-2019.reference.fasta'
                gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-                primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V1/nCoV-2019.primer.bed'
+                primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V1/nCoV-2019.primer.bed'
                scheme = 'nCoV-2019'
            }
            '2' {
-                fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V2/nCoV-2019.reference.fasta'
+                fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V2/nCoV-2019.reference.fasta'
                gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-                primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V2/nCoV-2019.primer.bed'
+                primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V2/nCoV-2019.primer.bed'
                scheme = 'nCoV-2019'
            }
            '3' {
-                fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta'
+                fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V3/nCoV-2019.reference.fasta'
                gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-                primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed'
+                primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V3/nCoV-2019.primer.bed'
                scheme = 'nCoV-2019'
            }
+            '4' {
+                fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V4/SARS-CoV-2.reference.fasta'
+                gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
+                primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V4/SARS-CoV-2.scheme.bed'
+                scheme = 'SARS-CoV-2'
+            }
+            '1200' {
+                fasta = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/primer_schemes/artic/nCoV-2019/V1200/nCoV-2019.reference.fasta'
+                gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
+                primer_bed = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/primer_schemes/artic/nCoV-2019/V1200/nCoV-2019.bed'
+                scheme = 'nCoV-2019'
+            }
        }
docs/aws_tower.md (new file, 5 lines)

@@ -0,0 +1,5 @@
+# nf-core/configs: AWS Batch with Tower Configuration
+
+To be used when submitting jobs to AWS Batch via Tower Forge. If you are not using Tower Forge, consider using the `awsbatch` profile instead, where you can directly specify the Batch queue, AWS region and AWS CLI path.
+
+This profile defines `awsbatch` as the executor and enables `overwrite` for the `trace`, `timeline`, `report` and `dag` outputs so that pipelines can be resumed.
docs/kraken.md (deleted file, 10 lines)

@@ -1,10 +0,0 @@
-# nf-core/configs: KRAKEN Configuration
-
-This profile can be **only** combined with `jenkins.config`. It is used for
-testing pipeline with real data on **in-house** cluster located at SciLifeLab.
-
-To use, run the pipeline with `-profile kraken`. This will download and launch
-the [`kraken.config`](../conf/kraken.config) which has been pre-configured to
-test the pipeline using `docker` by default.
-
-Example: `nextflow run -profile kraken,jenkins`
nfcore_custom.config

@@ -12,6 +12,7 @@
profiles {
    abims { includeConfig "${params.custom_config_base}/conf/abims.config" }
    awsbatch { includeConfig "${params.custom_config_base}/conf/awsbatch.config" }
+    aws_tower { includeConfig "${params.custom_config_base}/conf/aws_tower.config" }
    bi { includeConfig "${params.custom_config_base}/conf/bi.config" }
    bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
    binac { includeConfig "${params.custom_config_base}/conf/binac.config" }

@@ -36,7 +37,6 @@ profiles {
    genouest { includeConfig "${params.custom_config_base}/conf/genouest.config" }
    gis { includeConfig "${params.custom_config_base}/conf/gis.config" }
    hebbe { includeConfig "${params.custom_config_base}/conf/hebbe.config" }
-    kraken { includeConfig "${params.custom_config_base}/conf/kraken.config" }
    mpcdf { includeConfig "${params.custom_config_base}/conf/mpcdf.config" }
    munin { includeConfig "${params.custom_config_base}/conf/munin.config" }
    oist { includeConfig "${params.custom_config_base}/conf/oist.config" }