test(picard): Refactor markduplicates to use pytest-workflow
This commit is contained in:
  parent d009420a0c
  commit 7848019cdb

7 changed files with 20 additions and 52 deletions
@@ -1 +0,0 @@
-../../../../../tests/data/bam/test.paired_end.sorted.bam
@@ -1,18 +0,0 @@
-#!/usr/bin/env nextflow
-
-nextflow.enable.dsl = 2
-
-include { PICARD_MARKDUPLICATES } from '../main.nf' addParams( options: [:] )
-
-workflow test {
-
-    def input = []
-    input = [ [ id:'test', single_end:false ], // meta map
-              file("${baseDir}/input/test.paired_end.sorted.bam", checkIfExists: true) ]
-
-    PICARD_MARKDUPLICATES ( input )
-}
-
-workflow {
-    test()
-}
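
For reference, the deleted harness above was self-contained: it included the module with a relative path, defined its own workflow test, and wrapped it in an unnamed workflow { test() }, so it could be launched directly with plain Nextflow from its own directory, presumably along the lines of:

    nextflow run main.nf -profile docker

(run from the harness directory, so that the local nextflow.config and the input/ symlink resolve; the exact path and invocation are not stated in this diff).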
@@ -1,20 +0,0 @@
-
-params {
-    outdir = "output/"
-    publish_dir_mode = "copy"
-    enable_conda = false
-}
-
-profiles {
-    conda {
-        params.enable_conda = true
-    }
-    docker {
-        docker.enabled = true
-        docker.runOptions = '-u \$(id -u):\$(id -g)'
-    }
-    singularity {
-        singularity.enabled = true
-        singularity.autoMounts = true
-    }
-}
@@ -1,13 +0,0 @@
-## htsjdk.samtools.metrics.StringHeader
-# MarkDuplicates INPUT=[test.paired_end.sorted.bam] OUTPUT=test.bam METRICS_FILE=test.MarkDuplicates.metrics.txt MAX_SEQUENCES_FOR_DISK_READ_ENDS_MAP=50000 MAX_FILE_HANDLES_FOR_READ_ENDS_MAP=8000 SORTING_COLLECTION_SIZE_RATIO=0.25 TAG_DUPLICATE_SET_MEMBERS=false REMOVE_SEQUENCING_DUPLICATES=false TAGGING_POLICY=DontTag CLEAR_DT=true DUPLEX_UMI=false ADD_PG_TAG_TO_READS=true REMOVE_DUPLICATES=false ASSUME_SORTED=false DUPLICATE_SCORING_STRATEGY=SUM_OF_BASE_QUALITIES PROGRAM_RECORD_ID=MarkDuplicates PROGRAM_GROUP_NAME=MarkDuplicates READ_NAME_REGEX=<optimized capture of last three ':' separated fields as numeric values> OPTICAL_DUPLICATE_PIXEL_DISTANCE=100 MAX_OPTICAL_DUPLICATE_SET_SIZE=300000 VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false GA4GH_CLIENT_SECRETS=client_secrets.json USE_JDK_DEFLATER=false USE_JDK_INFLATER=false
-## htsjdk.samtools.metrics.StringHeader
-# Started on: Fri Aug 07 15:11:32 GMT 2020
-
-## METRICS CLASS picard.sam.DuplicationMetrics
-LIBRARY UNPAIRED_READS_EXAMINED READ_PAIRS_EXAMINED SECONDARY_OR_SUPPLEMENTARY_RDS UNMAPPED_READS UNPAIRED_READ_DUPLICATES READ_PAIR_DUPLICATES READ_PAIR_OPTICAL_DUPLICATES PERCENT_DUPLICATION ESTIMATED_LIBRARY_SIZE
-Unknown Library 0 10000 0 0 0 0 0 0
-
-## HISTOGRAM java.lang.Double
-set_size all_sets non_optical_sets
-1.0 10000 10000
-
Binary file not shown.
@@ -4,6 +4,7 @@ nextflow.enable.dsl = 2
 
 include { PICARD_MERGESAMFILES } from '../../../software/picard/mergesamfiles/main.nf' addParams( options: [:] )
 include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../software/picard/collectmultiplemetrics/main.nf' addParams( options: [:] )
+include { PICARD_MARKDUPLICATES } from '../../../software/picard/markduplicates/main.nf' addParams( options: [:] )
 
 workflow test_picard_mergesamfiles {
 
@@ -27,3 +28,12 @@ workflow test_picard_collectmultiplemetrics {
         file("${launchDir}/tests/data/fasta/E_coli/NC_010473.fa", checkIfExists: true)
     )
 }
+
+workflow test_picard_markduplicates {
+
+    def input = []
+    input = [ [ id:'test', single_end:false ], // meta map
+              file("${launchDir}/tests/data/bam/test.paired_end.sorted.bam", checkIfExists: true) ]
+
+    PICARD_MARKDUPLICATES ( input )
+}
@@ -23,3 +23,13 @@
     - path: output/picard/test.CollectMultipleMetrics.quality_by_cycle_metrics
     - path: output/picard/test.CollectMultipleMetrics.quality_distribution.pdf
     - path: output/picard/test.CollectMultipleMetrics.quality_distribution_metrics
+
+- name: Run picard MarkDuplicates
+  command: nextflow run ./tests/software/picard -profile docker -entry test_picard_markduplicates -c tests/config/nextflow.config
+  tags:
+    - picard
+    - picard_markduplicates
+  files:
+    - path: output/picard/test.MarkDuplicates.metrics.txt
+    - path: output/picard/test.bam
+      md5sum: 50407a1ee722f2bf6a20471c8a7fd6b0
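
The test.yml entry added above is what pytest-workflow consumes: it runs the command line, then asserts that every listed path exists and, where an md5sum is given, that the file's checksum matches. With pytest-workflow installed, the new case could be selected by its tag, for example (a sketch; the exact flags this repository's CI uses are not shown here):

    pytest --symlink --tag picard_markduplicates

--tag filters test cases by the tags declared in test.yml, and --symlink makes pytest-workflow symlink the test directory contents into its temporary working directory instead of copying them.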