Merge pull request #433 from ameynert/master

Updated documentation and DSL2 profile for Sarek
ameynert authored 2 years ago, committed by GitHub
commit bed950c735

@@ -46,5 +46,6 @@ singularity {
     runOptions = '-p -B "$TMPDIR"'
     enabled = true
     autoMounts = true
+    cacheDir = "/exports/igmm/eddie/BioinformaticsResources/nfcore/singularity-images"
 }
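The shared cache directory above is only readable if you have access to `/exports/igmm/eddie/BioinformaticsResources`. As an illustration only (not part of this profile), the same setting could be pointed at a directory of your own from a small config file passed to Nextflow with `-c`; the path below is a placeholder:

```nextflow
// personal_cache.config -- illustrative override only.
// Point cacheDir at any directory you own with enough space (not $HOME).
singularity {
    cacheDir = "/exports/eddie/scratch/<username>/singularity-images"
}
```

It would be supplied alongside the profile, for example `nextflow run nf-core/sarek -profile eddie -c personal_cache.config`.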

@@ -1,53 +1,77 @@
 process {
-    withName:MapReads {
-        cpus = 16
-        memory = 128.GB
-        clusterOptions = {"-l h_vmem=${(task.memory + 8.GB).bytes/task.cpus}"}
+    withName:APPLYBQSR {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
+    withName:APPLYBQSR_SPARK {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_APPLYVQSR {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_BASERECALIBRATOR {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_BASERECALIBRATOR_SPARK {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
-    withName:BuildDict {
-        cpus = 1
+    withName:GATK4_CALCULATECONTAMINATION {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:BamQC {
-        cpus = 8
-        memory = 128.GB
-        clusterOptions = {"-l h_vmem=${(task.memory + 8.GB).bytes/task.cpus}"}
+    withName:GATK4_CNNSCOREVARIANTS {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
+    withName:GATK4_CREATESEQUENCEDICTIONARY {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
-    withName:MarkDuplicates {
+    withName:GATK4_ESTIMATELIBRARYCOMPLEXITY {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:BaseRecalibrator {
+    withName:GATK4_FILTERMUTECTCALLS {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:ApplyBQSR {
+    withName:GATK4_FILTERVARIANTTRANCHES {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:GatherBQSRReports {
+    withName:GATK4_GATHERBQSRREPORTS {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:HaplotypeCaller {
+    withName:GATK4_GATHERPILEUPSUMMARIES {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:GenotypeGVCFs {
+    withName:GATK4_GENOMICSDBIMPORT {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:Mutect2 {
+    withName:GATK4_GENOTYPEGVCFS {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:MergeMutect2Stats {
+    withName:GATK4_GETPILEUPSUMMARIES {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:PileupSummariesForMutect2 {
+    withName:GATK4_HAPLOTYPECALLER {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:MergePileupSummaries {
+    withName:GATK4_INTERVALLISTTOBED {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:CalculateContamination {
+    withName:GATK4_LEARNREADORIENTATIONMODEL {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-    withName:FilterMutect2Calls {
+    withName:GATK4_MARKDUPLICATES {
         clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
     }
-}
+    withName:GATK4_MARKDUPLICATES_SPARK {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_MERGEMUTECTSTATS {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_MERGEVCFS {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_MUTECT2 {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
+    withName:GATK4_VARIANTRECALIBRATOR {
+        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
+    }
 }
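Each directive above is a dynamic `clusterOptions` closure: on SGE, `h_vmem` is typically enforced per slot, so the profile takes the task's memory, adds a fixed 4 GB of headroom, converts it to bytes and divides by the number of cpus. The same pattern can be extended from a user-supplied config; a minimal sketch, using a hypothetical process selector and illustrative figures:

```nextflow
// extra_resources.config -- hypothetical add-on, passed with `-c extra_resources.config`.
process {
    // MY_EXTRA_TOOL is a placeholder selector, not a process shipped by Sarek.
    withName:MY_EXTRA_TOOL {
        cpus   = 4
        memory = 28.GB
        // Per-slot request: (28.GB + 4.GB).bytes / 4 cpus = 34359738368 / 4
        // = 8589934592 bytes, i.e. 8 GB of h_vmem for each of the 4 slots.
        clusterOptions = {"-l h_vmem=${(task.memory + 4.GB).bytes/task.cpus}"}
    }
}
```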

@@ -35,16 +35,17 @@ This config enables Nextflow to manage the pipeline jobs via the SGE job schedul
 ## Singularity set-up
-Load Singularity from the module system and, if you have access to `/exports/igmm/eddie/BioinformaticsResources`, set the Singularity cache directory to the BioinformaticsResources path below. If some containers for your pipeline run are not present, please contact the [IGC Data Manager](data.manager@igc.ed.ac.uk) to have them added. You can add these lines to the file `$HOME/.bashrc`, or you can run these commands before you run an nf-core pipeline.
-If you do not have access to `/exports/igmm/eddie/BioinformaticsResources`, set the Singularity cache directory to somewhere sensible that is not in your `$HOME` area (which has limited space). It will take time to download all the Singularity containers, but you can use this again.
+Load Singularity from the module system.
 ```bash
 module load singularity
-export NXF_SINGULARITY_CACHEDIR="/exports/igmm/eddie/BioinformaticsResources/nf-core/singularity-images"
 ```
-Singularity will create a directory `.singularity` in your `$HOME` directory on eddie. Space on `$HOME` is very limited, so it is a good idea to create a directory somewhere else with more room and link the locations.
+The eddie profile is set to use `/exports/igmm/eddie/BioinformaticsResources/nfcore/singularity-images` as the Singularity cache directory. If some containers for your pipeline run are not present, please contact the [IGC Data Manager](data.manager@igc.ed.ac.uk) to have them added. You can add these lines to the file `$HOME/.bashrc`, or you can run these commands before you run an nf-core pipeline.
+If you do not have access to `/exports/igmm/eddie/BioinformaticsResources`, set the Singularity cache directory to somewhere sensible that is not in your `$HOME` area (which has limited space). It will take time to download all the Singularity containers, but you can use this again.
+Singularity will by default create a directory `.singularity` in your `$HOME` directory on eddie. Space on `$HOME` is very limited, so it is a good idea to create a directory somewhere else with more room and link the locations.
 ```bash
 cd $HOME
