diff --git a/conf/modules.config b/conf/modules.config
index 1f4914d..43f7ebb 100644
--- a/conf/modules.config
+++ b/conf/modules.config
@@ -136,9 +136,9 @@ process {
     withName: FILTLONG {
         ext.args = [
-            "--min_length ${params.longread_qc_minlength}",
-            "--keep_percent ${params.longread_qc_keep_percent}",
-            "--target_bases ${params.longread_qc_target_bases}"
+            "--min_length ${params.longread_qc_qualityfilter_minlength}",
+            "--keep_percent ${params.longread_qc_qualityfilter_keeppercent}",
+            "--target_bases ${params.longread_qc_qualityfilter_targetbases}"
         ]
             .join(' ').trim()
         ext.prefix = { "${meta.id}_${meta.run_accession}_filtered" }
diff --git a/conf/test.config b/conf/test.config
index 6aaf058..dac0e81 100644
--- a/conf/test.config
+++ b/conf/test.config
@@ -26,6 +26,7 @@ params {
     databases = 'https://raw.githubusercontent.com/nf-core/test-datasets/taxprofiler/database.csv'
     perform_shortread_qc = true
     perform_longread_qc = true
+    shortread_qc_mergepairs = true
     perform_shortread_complexityfilter = true
     perform_shortread_hostremoval = true
     perform_longread_hostremoval = true
diff --git a/conf/test_noprofiling.config b/conf/test_noprofiling.config
index 86d8e70..12c7185 100644
--- a/conf/test_noprofiling.config
+++ b/conf/test_noprofiling.config
@@ -26,6 +26,7 @@ params {
     databases = 'https://raw.githubusercontent.com/nf-core/test-datasets/taxprofiler/database.csv'
     perform_shortread_qc = true
     perform_longread_qc = true
+    shortread_qc_mergepairs = true
     perform_shortread_complexityfilter = true
     perform_shortread_hostremoval = true
     perform_longread_hostremoval = true
diff --git a/nextflow.config b/nextflow.config
index 6e84548..e82cf9a 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -58,18 +58,18 @@ params {
     perform_shortread_qc = false
     shortread_qc_tool = 'fastp'
     shortread_qc_skipadaptertrim = false
-    shortread_qc_mergepairs = false
+    shortread_qc_mergepairs = true
     shortread_qc_excludeunmerged = false
     shortread_qc_adapter1 = null
     shortread_qc_adapter2 = null
     shortread_qc_minlength = 15

-    perform_longread_qc = false
-    longread_qc_run_clip = false
-    longread_qc_run_filter = false
-    longread_qc_minlength = 1000
-    longread_qc_keep_percent = 90
-    longread_qc_target_bases = 500000000
+    perform_longread_qc = false
+    longread_qc_skipadaptertrim = false
+    longread_qc_skipqualityfilter = false
+    longread_qc_qualityfilter_minlength = 1000
+    longread_qc_qualityfilter_keeppercent = 90
+    longread_qc_qualityfilter_targetbases = 500000000

     save_preprocessed_reads = false
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 3bc7e3e..cc85806 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -10,11 +10,7 @@
             "type": "object",
             "fa_icon": "fas fa-terminal",
             "description": "Define where the pipeline should find input data and save output data.",
-            "required": [
-                "input",
-                "databases",
-                "outdir"
-            ],
+            "required": ["input", "databases", "outdir"],
             "properties": {
                 "input": {
                     "type": "string",
@@ -83,10 +79,7 @@
                 "shortread_qc_tool": {
                     "type": "string",
                     "default": "fastp",
-                    "enum": [
-                        "fastp",
-                        "adapterremoval"
-                    ],
+                    "enum": ["fastp", "adapterremoval"],
                     "fa_icon": "fas fa-tools",
                     "description": "Specify which tool to use for read QC"
                 },
@@ -131,11 +124,7 @@
                 "shortread_complexityfilter_tool": {
                     "type": "string",
                     "default": "bbduk",
-                    "enum": [
-                        "bbduk",
-                        "prinseqplusplus",
-                        "fastp"
-                    ],
+                    "enum": ["bbduk", "prinseqplusplus", "fastp"],
                     "fa_icon": "fas fa-hammer",
                     "description": "Specify which tool to use for complexity filtering"
                 },
@@ -165,10 +154,7 @@
                 "shortread_complexityfilter_prinseqplusplus_mode": {
                     "type": "string",
                     "default": "entropy",
-                    "enum": [
-                        "entropy",
-                        "dust"
-                    ],
+                    "enum": ["entropy", "dust"],
                     "fa_icon": "fas fa-check-square",
                     "description": "Specify the complexity filter mode for PRINSEQ++"
                 },
@@ -323,15 +309,7 @@
                 "diamond_output_format": {
                     "type": "string",
                     "default": "tsv",
-                    "enum": [
-                        "blast",
-                        "xml",
-                        "txt",
-                        "daa",
-                        "sam",
-                        "tsv",
-                        "paf"
-                    ],
+                    "enum": ["blast", "xml", "txt", "daa", "sam", "tsv", "paf"],
                     "fa_icon": "fas fa-file",
                     "description": "Specify output format from DIAMOND profiling."
                 },
@@ -348,14 +326,7 @@
                 "kaiju_taxon_rank": {
                     "type": "string",
                     "default": "species",
-                    "enum": [
-                        "phylum",
-                        "class",
-                        "order",
-                        "family",
-                        "genus",
-                        "species"
-                    ],
+                    "enum": ["phylum", "class", "order", "family", "genus", "species"],
                     "fa_icon": "fas fa-tag",
                     "description": "Specify taxonomic rank to be displayed in Kaiju taxon table"
                 },
@@ -540,14 +511,7 @@
                 "description": "Method used to save pipeline results to output directory.",
                 "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
                 "fa_icon": "fas fa-copy",
-                "enum": [
-                    "symlink",
-                    "rellink",
-                    "link",
-                    "copy",
-                    "copyNoFollow",
-                    "move"
-                ],
+                "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
                 "hidden": true
             },
             "email_on_fail": {
diff --git a/subworkflows/local/longread_preprocessing.nf b/subworkflows/local/longread_preprocessing.nf
index 3464167..c04207e 100644
--- a/subworkflows/local/longread_preprocessing.nf
+++ b/subworkflows/local/longread_preprocessing.nf
@@ -14,7 +14,7 @@ workflow LONGREAD_PREPROCESSING {
     ch_versions = Channel.empty()
     ch_multiqc_files = Channel.empty()

-    if ( params.longread_qc_run_clip && !params.longread_qc_run_filter ) {
+    if ( !params.longread_qc_skipadaptertrim && params.longread_qc_skipqualityfilter) {
         PORECHOP ( reads )

         ch_processed_reads = PORECHOP.out.reads
@@ -28,7 +28,7 @@ workflow LONGREAD_PREPROCESSING {
         ch_versions = ch_versions.mix(PORECHOP.out.versions.first())
         ch_multiqc_files = ch_multiqc_files.mix( PORECHOP.out.log )

-    } else if ( !params.longread_qc_run_clip && params.longread_qc_run_filter ) {
+    } else if ( params.longread_qc_skipadaptertrim && !params.longread_qc_skipqualityfilter) {
         ch_processed_reads = FILTLONG ( reads.map{ meta, reads -> [meta, [], reads ]} )
         ch_versions = ch_versions.mix(FILTLONG.out.versions.first())
diff --git a/workflows/taxprofiler.nf b/workflows/taxprofiler.nf
index b6f3a37..2bd6c01 100644
--- a/workflows/taxprofiler.nf
+++ b/workflows/taxprofiler.nf
@@ -22,7 +22,6 @@
 if (params.databases) { ch_databases = file(params.databases) } else { exit 1, 'Input database sheet not specified!' }

 if (params.shortread_qc_mergepairs && params.run_malt ) log.warn "[nf-core/taxprofiler] MALT does not accept uncollapsed paired-reads. Pairs will be profiled as separate files."
 if (params.shortread_qc_excludeunmerged && !params.shortread_qc_mergepairs) exit 1, "ERROR: [nf-core/taxprofiler] cannot include unmerged reads when merging not turned on. Please specify --shortread_qc_mergepairs"
-if ( (params.longread_qc_run_clip || params.longread_qc_run_filter) & !params.perform_longread_qc ) exit 1, "ERROR: [nf-core/taxprofiler] --longread_qc_run_clip or --longread_qc_run_filter requested but quality-control not turned on. Please specify --perform_long_qc"
 if (params.shortread_complexityfilter_tool == 'fastp' && ( params.perform_shortread_qc == false || params.shortread_qc_tool != 'fastp' )) exit 1, "ERROR: [nf-core/taxprofiler] cannot use fastp complexity filtering if preprocessing not turned on and/or tool is not fastp. Please specify --perform_shortread_qc and/or --shortread_qc_tool 'fastp'"
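As a usage sketch (not part of the patch itself): the renamed long-read QC parameters could be supplied via a user config after this change. The file name custom.config and the chosen values below are illustrative only; the parameter names, defaults, and the Porechop/Filtlong behaviour they control are taken from nextflow.config and subworkflows/local/longread_preprocessing.nf above.

// custom.config (hypothetical example)
params {
    perform_longread_qc                   = true
    longread_qc_skipadaptertrim           = false      // keep Porechop adapter clipping enabled
    longread_qc_skipqualityfilter         = false      // keep Filtlong quality filtering enabled
    longread_qc_qualityfilter_minlength   = 1000
    longread_qc_qualityfilter_keeppercent = 90
    longread_qc_qualityfilter_targetbases = 500000000
}

It would then be passed on the command line in the usual Nextflow way, for example: nextflow run nf-core/taxprofiler -profile test,docker -c custom.config --outdir <OUTDIR>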