
Remove --pep parameter and leave just --input

Rafal Stepien 2022-09-16 09:08:28 -04:00
parent 768f9980d8
commit 9349346864
12 changed files with 37 additions and 35 deletions

View file

@@ -73,7 +73,7 @@ On release, automated continuous integration tests run the pipeline on a full-si
 nextflow run nf-core/taxprofiler --input samplesheet.csv --databases database.csv --outdir <OUTDIR> --run_<TOOL1> --run_<TOOL1> -profile <docker/singularity/podman/shifter/charliecloud/conda/institute>
 ```
 Note pipeline supports both CSV and PEP input sample sheets. Find out more [here](http://pep.databio.org/en/2.1.0/specification/).
 ## Documentation
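For orientation, a minimal Nextflow config sketch of the parameter surface after this change; the file paths below are placeholders, not values from the commit. The value that previously went to the removed `--pep` parameter is now supplied through the single `--input` parameter, which accepts either a CSV samplesheet or a PEP config YAML.

```nextflow
// Hedged illustration only: the paths are placeholders.
params {
    // A CSV samplesheet, exactly as before:
    input     = 'samplesheet.csv'
    // ...or a PEP config YAML, which previously had to be passed via the removed --pep parameter:
    // input  = 'pep/config.yaml'
    databases = 'database.csv'
    outdir    = './results'
}
```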

View file

@@ -421,4 +421,12 @@ process {
             saveAs: { filename -> filename.equals('versions.yml') ? null : filename }
         ]
     }
+    withName: 'EIDO_VALIDATE' {
+        ext.args = '--st-index sample'
+    }
+    withName: 'EIDO_CONVERT' {
+        ext.args = '--st-index sample'
+    }
 }
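For readers unfamiliar with how these `withName` blocks take effect: `ext.args` set in the configuration is injected into the matching module's command line through the standard nf-core pattern. A minimal sketch under that assumption follows; the process name and tool invocation are hypothetical, not the eido modules' actual scripts.

```nextflow
process EXAMPLE_TOOL {                  // hypothetical process name, for illustration only
    input:
    path samplesheet

    output:
    path 'out.txt'

    script:
    // A config block `withName: 'EXAMPLE_TOOL' { ext.args = '--st-index sample' }`
    // would be picked up here and appended to the command below.
    def args = task.ext.args ?: ''
    """
    example_tool $args $samplesheet > out.txt
    """
}
```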

View file

@@ -8,8 +8,7 @@ params {
     max_time = '6.h'
     // Input data
-    input = null
-    pep = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/pep/test_pep_format_files/config.yaml'
+    input = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/pep/test_pep_format_files/config.yaml'
     databases = 'https://raw.githubusercontent.com/nf-core/test-datasets/taxprofiler/database.csv'
     perform_shortread_qc = true
     perform_longread_qc = true
@@ -41,10 +40,4 @@ process {
     withName: MEGAN_RMA2INFO {
         maxForks = 1
     }
-    withName: 'EIDO_VALIDATE' {
-        ext.args = '--st-index sample'
-    }
-    withName: 'EIDO_CONVERT' {
-        ext.args = '--st-index sample'
-    }
 }

View file

@@ -74,8 +74,8 @@ class WorkflowMain {
         NfcoreTemplate.awsBatch(workflow, params)
         // Check input has been provided
-        if (!params.input && !params.pep) {
-            log.error "Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'"
+        if (!params.input) {
+            log.error "Please provide an input samplesheet or PEP to the pipeline e.g. '--input samplesheet.csv'"
             System.exit(1)
         }
     }

View file

@@ -43,11 +43,11 @@
         },
         "eido/convert": {
             "branch": "master",
-            "git_sha": "c9b29c76869d9713130a13a418c1e8b5aecfb80d"
+            "git_sha": "9764eef361ded86e9242075bda64c2662421386a"
         },
         "eido/validate": {
             "branch": "master",
-            "git_sha": "8c0127e071711cb0a2648a6bdf881637a9d7eadc"
+            "git_sha": "38383cfaefc06cd35e25de99989a3e6ab9ed2980"
         },
         "fastp": {
             "branch": "master",

View file

@@ -10,6 +10,7 @@ process EIDO_CONVERT {
     input:
     path samplesheet
     val format
+    path pep_input_base_dir
 
     output:
     path "versions.yml" , emit: versions

View file

@@ -22,6 +22,9 @@ input:
   - format:
       type: value
       description: Extension of an output file
+  - pep_input_base_dir:
+      type: file
+      description: Optional path to the directory where files specified in a PEP config file are stored. Any paths specified in the config will need to be relative to this base directory.
 
 output:
   - versions:
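To make the new third input concrete, a hedged call-site sketch follows. The three arguments mirror the module's declared inputs (`samplesheet`, `format`, `pep_input_base_dir`); the include path, the `"csv"` output format, and the variable names are assumptions for illustration.

```nextflow
// Illustrative wiring only; the include path and the "csv" format value are assumptions.
include { EIDO_CONVERT } from '../modules/nf-core/modules/eido/convert/main'

workflow {
    pep_config         = file(params.input)   // the PEP config YAML now arrives via --input
    pep_input_base_dir = []                    // or the config's parent directory for a local PEP

    // The third argument corresponds to the `path pep_input_base_dir` input added above.
    EIDO_CONVERT ( pep_config, "csv", pep_input_base_dir )
}
```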

View file

@@ -10,6 +10,7 @@ process EIDO_VALIDATE {
     input:
     path samplesheet
     path schema
+    path pep_input_base_dir
 
     output:
     path "versions.yml" , emit: versions

View file

@@ -23,6 +23,9 @@ input:
       type: file
       description: Schema that the samplesheet will be validated against
       pattern: "*.{yaml,yml}"
+  - pep_input_base_dir:
+      type: file
+      description: Optional path to the directory where files specified in a PEP config file are stored. Any paths specified in the config will need to be relative to this base directory.
 
 output:
   - versions:
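Analogously for validation, a hedged call-site sketch; the include path and the schema file name are assumptions, while the three inputs mirror the module definition above.

```nextflow
// Illustrative wiring only; the include path and schema file name are assumptions.
include { EIDO_VALIDATE } from '../modules/nf-core/modules/eido/validate/main'

workflow {
    pep_config         = file(params.input)                             // PEP config YAML via --input
    schema             = file("${projectDir}/assets/samplesheet_schema.yaml")
    pep_input_base_dir = []                                             // or the config's parent directory

    EIDO_VALIDATE ( pep_config, schema, pep_input_base_dir )            // third input added in this commit
}
```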

View file

@@ -12,7 +12,6 @@ params {
     // TODO nf-core: Specify your pipeline's command line flags
     // Input options
     input = null
-    pep = null
 
     // References

View file

@@ -10,18 +10,13 @@
         "type": "object",
         "fa_icon": "fas fa-terminal",
         "description": "Define where the pipeline should find input data and save output data.",
-        "required": ["outdir", "databases"],
+        "required": ["outdir", "databases", "input"],
         "properties": {
-            "pep": {
-                "type": "string",
-                "format": "file-path",
-                "pattern": "^\\S+\\.yaml$"
-            },
             "input": {
                 "type": "string",
                 "format": "file-path",
                 "mimetype": "text/csv",
-                "pattern": "^\\S+\\.csv$",
+                "pattern": "^\\S+\\.(csv|yaml)$",
                 "schema": "assets/schema_input.json",
                 "description": "Path to comma-separated file containing information about the samples and libraries/runs.",
                 "help_text": "You will need to create a design file with information about the samples and libraries/runs you want to running in your pipeline run. Use this parameter to specify its location. It has to be a comma-separated file with 6 columns, and a header row. See [usage docs](https://nf-co.re/taxprofiler/usage#samplesheet-input).",

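To make the relaxed `pattern` concrete, a small Groovy check of what the new regular expression accepts compared with the old one; the example paths are illustrative.

```groovy
// The schema patterns, written exactly as they appear in the JSON (escaped backslashes).
def oldPattern = '^\\S+\\.csv$'
def newPattern = '^\\S+\\.(csv|yaml)$'

assert 'samplesheet.csv'.matches(newPattern)      // CSV samplesheets still pass
assert 'pep/config.yaml'.matches(newPattern)      // PEP config YAMLs are now accepted via --input
assert !'pep/config.yaml'.matches(oldPattern)     // ...which the old pattern rejected
assert !'pep_config.yml'.matches(newPattern)      // a bare .yml extension would not match
assert !'my samplesheet.csv'.matches(newPattern)  // whitespace is still rejected by \S+
```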
View file

@@ -17,23 +17,22 @@ def checkPathParamList = [ params.input, params.databases, params.hostremoval_re
 for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } }
 // Check mandatory parameters
-if (params.input) {
-    ch_input = file(params.input)
-    ch_pep_input_base_dir = []
-} else if (params.pep) {
-    if ( params.pep.startsWith("http://") || params.pep.startsWith("https://") ) {
-        ch_input = file(params.pep)
-        ch_pep_input_base_dir = []
-    }
-    else {
-        ch_input = file(params.pep)
-        ch_pep_input_base_dir = new File(params.pep).getParent()
-    }
+if ( params.input.endsWith(".yaml") ) {
+    if ( params.input.startsWith("http://") || params.input.startsWith("https://") ) {
+        ch_input = file(params.input)
+        ch_pep_input_base_dir = []
+    }
+    else {
+        ch_input = file(params.input)
+        ch_pep_input_base_dir = new File(params.input).getParent()
+    }
+} else if ( params.input.endsWith(".csv") ) {
+    ch_input = file(params.input)
+    ch_pep_input_base_dir = []
 } else {
     exit 1, 'Input samplesheet or PEP config not specified!'
 }