
Added existing configs from all pipelines

drpatelh 2018-11-23 15:56:34 +00:00
parent ed3167c2a1
commit d52db66077
10 changed files with 238 additions and 0 deletions
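
Each of these files is meant to be pulled into a pipeline run through a Nextflow profile. A minimal sketch of how a pipeline's nextflow.config might wire them up; the profile block below is illustrative and not part of this commit:

// Hypothetical excerpt from a pipeline's nextflow.config (not part of this commit):
profiles {
    binac { includeConfig 'conf/binac.config' }
    cfc   { includeConfig 'conf/cfc.config' }
    uzh   { includeConfig 'conf/uzh.config' }
}
// A user then selects one at launch time with `-profile binac`, `-profile cfc`, and so on.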

conf/binac.config (Normal file, +22 lines)
@@ -0,0 +1,22 @@
/*
* ----------------------------------------------------------------------------
* Nextflow config file for use with Singularity on BINAC cluster in Tuebingen
* ----------------------------------------------------------------------------
* Defines basic usage limits and the Singularity module to load.
*/
singularity {
enabled = true
}
process {
beforeScript = 'module load devel/singularity/3.0.1'
executor = 'pbs'
queue = 'short'
}
params {
max_memory = 128.GB
max_cpus = 28
max_time = 48.h
}
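
The max_memory, max_cpus and max_time params above are only caps; they take effect because nf-core pipelines clamp every process request against them. A simplified sketch of that mechanism, assuming a check_max helper as the pipelines' base config typically defines (values and helper body are illustrative, not part of this commit):

// Hypothetical base-config excerpt showing how the caps are applied per process:
process {
    cpus   = { check_max( 2, 'cpus' ) }
    memory = { check_max( 8.GB * task.attempt, 'memory' ) }
    time   = { check_max( 4.h * task.attempt, 'time' ) }
}

// Simplified clamping helper; real pipelines ship a more defensive version:
def check_max(obj, type) {
    if (type == 'memory') return obj > params.max_memory ? params.max_memory : obj
    if (type == 'time')   return obj > params.max_time   ? params.max_time   : obj
    if (type == 'cpus')   return Math.min(obj as int, params.max_cpus as int)
    return obj
}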

conf/ccga.config (Normal file, +25 lines)
@@ -0,0 +1,25 @@
/*
* -------------------------------------------------
* Nextflow config file with environment modules for RZCluster in Kiel
* -------------------------------------------------
*/
executor {
queueSize = 100
}
process {
// Global process config
executor = 'slurm'
queue = 'ikmb_a'
clusterOptions = { "--qos=ikmb_a" }
}
params {
// illumina iGenomes reference file paths on RZCluster
igenomes_base = '/ifs/data/nfs_share/ikmb_repository/references/iGenomes/references/'
saveReference = true
}
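
igenomes_base is only a path prefix; the per-genome file paths are assembled from it in the pipeline's iGenomes config. A hypothetical excerpt of that pattern (genome key and file layout shown for illustration, not part of this commit):

// Hypothetical igenomes.config excerpt built on the prefix above:
params {
    genomes {
        'GRCh37' {
            fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa"
            gtf   = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf"
        }
    }
}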

conf/cfc.config (Normal file, +21 lines)
@@ -0,0 +1,21 @@
/*
* -------------------------------------------------------------
* Nextflow config file for use with Singularity on CFC at QBIC
* -------------------------------------------------------------
* Defines basic usage limits and the Singularity module to load.
*/
singularity {
enabled = true
}
process {
beforeScript = 'module load qbic/singularity_slurm/3.0.1'
executor = 'slurm'
}
params {
max_memory = 60.GB
max_cpus = 24
max_time = 140.h
}
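
With singularity.enabled = true, whatever container the pipeline declares is fetched and run through Singularity rather than Docker. A hedged sketch of the pipeline-side counterpart this config relies on (image name is an example only):

// Hypothetical pipeline-side setting; on CFC the image is executed via Singularity:
process {
    container = 'nfcore/rnaseq:1.0'   // example image, not part of this commit
}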

conf/crick.config (Executable file, +23 lines)
@@ -0,0 +1,23 @@
/*
* -------------------------------------------------
* Nextflow config file for CAMP HPC @ The Crick
* -------------------------------------------------
*/
singularity {
enabled = true
autoMounts = true
}
process {
beforeScript = 'module load Singularity/2.6.0-foss-2016b'
executor = 'slurm'
}
params {
max_memory = 224.GB
max_cpus = 32
max_time = 72.h
igenomes_base = '/camp/svc/reference/Genomics/iGenomes'
}
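
autoMounts = true makes Singularity bind host paths (such as igenomes_base above) into the container automatically. For comparison, a hypothetical manual alternative without autoMounts, using an explicit bind:

// Hypothetical manual bind, shown only to illustrate what autoMounts avoids:
singularity {
    enabled    = true
    runOptions = '-B /camp/svc/reference/Genomics/iGenomes'
}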

conf/gis.config (Normal file, +23 lines)
@@ -0,0 +1,23 @@
/*
* -------------------------------------------------
* Nextflow config file for GIS (Aquila)
* -------------------------------------------------
* Defines reference genomes, using iGenome paths
* Imported under the default 'standard' Nextflow
* profile in nextflow.config
*/
process {
executor = 'sge'
clusterOptions = { "-l mem_free=" + task.memory.toString().replaceAll(/[\sB]/,'') }
penv = 'OpenMP'
errorStrategy = { task.attempt < 2 ? 'retry' : 'finish' }
// sources a fixed conda environment in place of the pipeline container (the environment name here is pipeline-specific)
beforeScript = { 'source /mnt/projects/rpd/rc/init.2017-04; module load miniconda3; set +u; source activate nfcore-rnaseq-1.0dev; set -u;' }
}
params {
saveReference = true
// illumina iGenomes reference file paths on GIS Aquila
igenomes_base = '/mnt/projects/rpd/genomes.testing/S3_igenomes/'
}
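
The clusterOptions closure above strips the space and trailing 'B' from Nextflow's memory string so SGE accepts the value. A small Groovy illustration with an example value:

// Example only: task.memory.toString() renders values like '8 GB'
def mem = '8 GB'
assert mem.replaceAll(/[\sB]/,'') == '8G'   // so clusterOptions becomes '-l mem_free=8G'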

conf/hebbe.config (Normal file, +26 lines)
@@ -0,0 +1,26 @@
/*
* -------------------------------------------------
* Gothenburg Hebbe Cluster config file
* -------------------------------------------------
* http://www.c3se.chalmers.se/index.php/Hebbe
*/
singularity {
enabled = true
}
process {
executor = 'slurm'
clusterOptions = { "-A $params.project ${params.clusterOptions ?: ''}" }
/* The Hebbe scheduler fails if you try to request an amount of memory for a job */
withName: '*' { memory = null }
}
params {
saveReference = true
max_memory = 64.GB
max_cpus = 20
max_time = 240.h
}
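
The clusterOptions closure assumes params.project (and optionally params.clusterOptions) are defined, so the pipeline needs to declare defaults for them; a hypothetical declaration it would carry in its own nextflow.config, overridden at launch with --project:

// Hypothetical pipeline-side defaults, not part of this commit:
params {
    project        = false
    clusterOptions = false
}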

conf/uct_hex.config (Normal file, +27 lines)
@@ -0,0 +1,27 @@
/*
* -------------------------------------------------
* University of Cape Town HEX cluster config file
* -------------------------------------------------
* http://hpc.uct.ac.za/index.php/hex-3/
*/
singularity {
enabled = true
cacheDir = "/scratch/DB/bio/singularity-containers"
}
process {
stageInMode = 'symlink'
stageOutMode = 'rsync'
queue = 'UCTlong'
clusterOptions = { "-M $params.email -m abe -l nodes=1:ppn=1:series600" }
}
executor {
name = 'pbs'
jobName = { "$task.tag" }
}
params {
igenomes_base = '/scratch/DB/bio/rna-seq/references'
}
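
The jobName closure relies on task.tag, which is only populated when a process declares a tag directive. A minimal DSL1-era sketch for illustration; the process and channel names are invented and not part of this commit:

// Hypothetical DSL1 process whose tag becomes the PBS job name via the jobName closure above:
names = Channel.from('sampleA', 'sampleB')

process say_hello {
    tag "$name"
    input:
    val name from names
    output:
    stdout into results
    script:
    "echo Hello $name"
}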

conf/uppmax-devel.config (Normal file, +24 lines)
@@ -0,0 +1,24 @@
/*
* -------------------------------------------------
* Nextflow config file for UPPMAX (milou / irma)
* -------------------------------------------------
* To be applied after the main UPPMAX config: overrides its settings and
* submits jobs to the `devcore` queue, which has much shorter
* queue times. Jobs are limited to 1 hour to be eligible
* for this queue, and only one job is allowed at a time.
*/
executor {
name = 'slurm'
queueSize = 1
}
process {
queue = 'devcore'
}
params {
// Max resources to be requested by a devel job
max_memory = 120.GB
max_cpus = 16
max_time = 1.h
}
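
Since this file only makes sense layered on top of the main UPPMAX config, a profile would typically include both, with the later include overriding the earlier one; an illustrative sketch (profile name assumed, not part of this commit):

// Hypothetical profile wiring applying this file after uppmax.config:
profiles {
    uppmax_devel {
        includeConfig 'conf/uppmax.config'
        includeConfig 'conf/uppmax-devel.config'   // overrides queue, queueSize and the max_* caps
    }
}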

conf/uppmax.config (Normal file, +28 lines)
@@ -0,0 +1,28 @@
/*
* -------------------------------------------------
* Nextflow config file for UPPMAX (milou / irma)
* -------------------------------------------------
* Defines reference genomes, using iGenome paths
* Imported under the default 'standard' Nextflow
* profile in nextflow.config
*/
singularity {
enabled = true
}
process {
executor = 'slurm'
clusterOptions = { "-A $params.project ${params.clusterOptions ?: ''}" }
}
params {
saveReference = true
// Max resources available on a normal node on milou. If you need more memory, run on a fat node using:
// --clusterOptions "-C mem512GB" --max_memory "512GB"
max_memory = 128.GB
max_cpus = 16
max_time = 240.h
// illumina iGenomes reference file paths on UPPMAX
igenomes_base = '/sw/data/uppnex/igenomes/'
}
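
To make the accounting flag concrete, this is how the clusterOptions closure above evaluates for example values (the project id is invented for illustration):

// Example values only, evaluated exactly as the closure above:
def params = [project: 'snic2018-8-123', clusterOptions: '-C mem512GB']
assert "-A $params.project ${params.clusterOptions ?: ''}" == '-A snic2018-8-123 -C mem512GB'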

conf/uzh.config (Normal file, +19 lines)
@@ -0,0 +1,19 @@
/*
* --------------------------------------------------------------------------------
* Nextflow config file for use with Singularity on University of Zurich Cluster
* --------------------------------------------------------------------------------
*/
singularity {
enabled = true
}
process {
executor = 'slurm'
}
params {
max_memory = 1800.GB
max_cpus = 112
max_time = 168.h
}