Fix merge conflicts

drpatelh committed 2021-02-05 10:06:39 +00:00 (commit 81ae0089bc)
288 changed files with 2671 additions and 9679 deletions

.github/filters.yml (new file, 142 lines)

@ -0,0 +1,142 @@
bandage_image:
- software/bandage/image/**
- tests/software/bandage/image/**
bowtie_align:
- software/bowtie/align/**
- software/bowtie/build/**
- tests/software/bowtie/align/**
bowtie:
- software/bowtie/build/**
- tests/software/bowtie/build/**
bowtie2_align:
- software/bowtie2/align/**
- software/bowtie2/build/**
- tests/software/bowtie2/align/**
bowtie2:
- software/bowtie2/build/**
- tests/software/bowtie2/build/**
bwa_index:
- software/bwa/index/**
- tests/software/bwa/index/**
bwa_mem:
- software/bwa/mem/**
- tests/software/bwa/mem/**
cutadapt:
- software/cutadapt/**
- tests/software/cutadapt/**
dsh_filterbed:
- software/dsh/filterbed/**
- tests/software/dsh/filterbed/**
dsh_splitbed:
- software/dsh/splitbed/**
- tests/software/dsh/splitbed/**
fastp:
- software/fastp/**
- tests/software/fastp/**
fastqc:
- software/fastqc/**
- tests/software/fastqc/**
gffread:
- software/gffread/**
- tests/software/gffread/**
multiqc:
- software/fastqc/**
- software/multiqc/**
- tests/software/multiqc/**
pangolin:
- software/pangolin/**
- tests/software/pangolin/**
picard_collectmultiplemetrics:
- software/picard/collectmultiplemetrics/**
- tests/software/picard/collectmultiplemetrics/**
picard_markduplicates:
- software/picard/markduplicates/**
- tests/software/picard/markduplicates/**
picard_mergesamfiles:
- software/picard/mergesamfiles/**
- tests/software/picard/mergesamfiles/**
preseq_lcextrap:
- software/preseq/lcextrap/**
- tests/software/preseq/lcextrap/**
quast:
- software/quast/**
- tests/software/quast/**
salmon_index:
- software/salmon/index/**
- tests/software/salmon/index/**
salmon_quant:
- software/salmon/quant/**
- tests/software/salmon/quant/**
samtools_flagstat:
- software/samtools/flagstat/**
- tests/software/samtools/flagstat/**
samtools_idxstats:
- software/samtools/idxstats/**
- tests/software/samtools/idxstats/**
samtools_index:
- software/samtools/index/**
- tests/software/samtools/index/**
samtools_mpileup:
- software/samtools/mpileup/**
- tests/software/samtools/mpileup/**
samtools_sort:
- software/samtools/sort/**
- tests/software/samtools/sort/**
samtools_stats:
- software/samtools/stats/**
- tests/software/samtools/stats/**
samtools_view:
- software/samtools/view/**
- tests/software/samtools/view/**
seacr_callpeak:
- software/seacr/callpeak/**
- tests/software/seacr/callpeak/**
star_align:
- software/star/align/**
- tests/software/star/align/**
star_genomegenerate:
- software/star/genomegenerate/**
- tests/software/star/genomegenerate/**
stringtie:
- software/stringtie/**
- tests/software/stringtie/**
trimgalore:
- software/trimgalore/**
- tests/software/trimgalore/**
ucsc_bedgraphtobigwig:
- software/ucsc/bedgraphtobigwig/**
- tests/software/ucsc/bedgraphtobigwig/**

.github/workflows/bowtie2_align.yml (deleted file)

@ -1,42 +0,0 @@
name: bowtie2_align
on:
push:
paths:
- software/bowtie2/align/**
- software/bowtie2/build/**
- .github/workflows/bowtie2_align.yml
- tests/software/bowtie2/**
pull_request:
paths:
- software/bowtie2/align/**
- software/bowtie2/build/**
- .github/workflows/bowtie2_align.yml
- tests/software/bowtie2/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bowtie2_align --symlink --wt 2

.github/workflows/bowtie2_build.yml (deleted file)

@ -1,40 +0,0 @@
name: bowtie2_build
on:
push:
paths:
- software/bowtie2/build/**
- .github/workflows/bowtie2_build.yml
- tests/software/bowtie2/**
pull_request:
paths:
- software/bowtie2/build/**
- .github/workflows/bowtie2_build.yml
- tests/software/bowtie2/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bowtie2_build --symlink --wt 2

.github/workflows/bowtie_align.yml (deleted file)

@ -1,42 +0,0 @@
name: bowtie_align
on:
push:
paths:
- software/bowtie/align/**
- software/bowtie/index/**
- .github/workflows/bowtie_align.yml
- tests/software/bowtie/**
pull_request:
paths:
- software/bowtie/align/**
- software/bowtie/index/**
- .github/workflows/bowtie_align.yml
- tests/software/bowtie/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bowtie_align --symlink --wt 2

.github/workflows/bowtie_build.yml (deleted file)

@ -1,40 +0,0 @@
name: bowtie_build
on:
push:
paths:
- software/bowtie/build/**
- .github/workflows/bowtie_build.yml
- tests/software/bowtie/**
pull_request:
paths:
- software/bowtie/build/**
- .github/workflows/bowtie_build.yml
- tests/software/bowtie/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bowtie_build --symlink --wt 2


@ -1,40 +0,0 @@
name: bwa_index
on:
push:
paths:
- software/bwa/index/**
- .github/workflows/software/bwa_index.yml
- tests/software/bwa/**
pull_request:
paths:
- software/bwa/index/**
- .github/workflows/software/bwa_index.yml
- tests/software/bwa/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bwa_index --symlink --wt 2

.github/workflows/bwa_mem.yml (deleted file)

@ -1,40 +0,0 @@
name: bwa_mem
on:
push:
paths:
- software/bwa/mem/**
- .github/workflows/bwa_mem.yml
- tests/software/bwa/**
pull_request:
paths:
- software/bwa/mem/**
- .github/workflows/bwa_mem.yml
- tests/software/bwa/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag bwa_mem --symlink --wt 2

.github/workflows/cutadapt.yml (deleted file)

@ -1,40 +0,0 @@
name: cutadapt
on:
push:
paths:
- software/cutadapt/**
- .github/workflows/cutadapt.yml
- tests/software/cutadapt/**
pull_request:
paths:
- software/cutadapt/**
- .github/workflows/cutadapt.yml
- tests/software/cutadapt/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag cutadapt --symlink --wt 2

.github/workflows/dsh_filterbed.yml (deleted file)

@ -1,40 +0,0 @@
name: dsh_filterbed
on:
push:
paths:
- software/dsh/filterbed/**
- .github/workflows/dsh_filterbed.yml
- tests/software/dsh/**
pull_request:
paths:
- software/dsh/filterbed/**
- .github/workflows/dsh_filterbed.yml
- tests/software/dsh/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag dsh_filterbed --symlink --wt 2

.github/workflows/dsh_splitbed.yml (deleted file)

@ -1,40 +0,0 @@
name: dsh_splitbed
on:
push:
paths:
- software/dsh/splitbed/**
- .github/workflows/dsh_splitbed.yml
- tests/software/dsh/**
pull_request:
paths:
- software/dsh/splitbed/**
- .github/workflows/dsh_splitbed.yml
- tests/software/dsh/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag dsh_splitbed --symlink --wt 2

.github/workflows/fastp.yml (deleted file)

@ -1,40 +0,0 @@
name: fastp
on:
push:
paths:
- software/fastp/**
- .github/workflows/fastp.yml
- tests
pull_request:
paths:
- software/fastp/**
- .github/workflows/fastp.yml
- tests
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag fastp --symlink --wt 2

.github/workflows/fastqc.yml (deleted file)

@ -1,40 +0,0 @@
name: fastqc
on:
push:
paths:
- software/fastqc/**
- .github/workflows/fastqc.yml
- tests/software/fastqc/**
pull_request:
paths:
- software/fastqc/**
- .github/workflows/fastqc.yml
- tests/software/fastqc/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag fastqc --symlink --wt 2

.github/workflows/gffread.yml (deleted file)

@ -1,40 +0,0 @@
name: gffread
on:
push:
paths:
- software/gffread/**
- .github/workflows/gffread.yml
- tests/software/gffread/**
pull_request:
paths:
- software/gffread/**
- .github/workflows/gffread.yml
- tests/software/gffread/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag gffread --symlink --wt 2

.github/workflows/multiqc.yml (deleted file)

@ -1,40 +0,0 @@
name: multiqc
on:
push:
paths:
- software/multiqc/**
- .github/workflows/multiqc.yml
- tests
pull_request:
paths:
- software/multiqc/**
- .github/workflows/multiqc.yml
- tests
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag multiqc --symlink --wt 2

.github/workflows/pangolin.yml (deleted file)

@ -1,40 +0,0 @@
name: pangolin
on:
push:
paths:
- software/pangolin/**
- .github/workflows/pangolin.yml
- tests
pull_request:
paths:
- software/pangolin/**
- .github/workflows/pangolin.yml
- tests
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag pangolin --symlink --wt 2

.github/workflows/picard_collectmultiplemetrics.yml (deleted file)

@ -1,40 +0,0 @@
name: picard_collectmultiplemetrics
on:
push:
paths:
- software/picard/collectmultiplemetrics/**
- .github/workflows/picard_collectmultiplemetrics.yml
- tests/software/picard/**
pull_request:
paths:
- software/picard/collectmultiplemetrics/**
- .github/workflows/picard_collectmultiplemetrics.yml
- tests/software/picard/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag picard_collectmultiplemetrics --symlink --wt 2

.github/workflows/picard_markduplicates.yml (deleted file)

@ -1,40 +0,0 @@
name: picard_markduplicates
on:
push:
paths:
- software/picard/markduplicates/**
- .github/workflows/picard_markduplicates.yml
- tests/software/picard/**
pull_request:
paths:
- software/picard/markduplicates/**
- .github/workflows/picard_markduplicates.yml
- tests/software/picard/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag picard_markduplicates --symlink --wt 2

.github/workflows/picard_mergesamfiles.yml (deleted file)

@ -1,40 +0,0 @@
name: picard_mergesamfiles
on:
push:
paths:
- software/picard/mergesamfiles/**
- .github/workflows/picard_mergesamfiles.yml
- tests/software/picard/**
pull_request:
paths:
- software/picard/mergesamfiles/**
- .github/workflows/picard_mergesamfiles.yml
- tests/software/picard/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag picard_mergesamfiles --symlink --wt 2

.github/workflows/preseq_lcextrap.yml (deleted file)

@ -1,40 +0,0 @@
name: preseq_lcextrap
on:
push:
paths:
- software/preseq/lcextrap/**
- .github/workflows/preseq_lcextrap.yml
- tests/software/preseq/**
pull_request:
paths:
- software/preseq/lcextrap/**
- .github/workflows/preseq_lcextrap.yml
- tests/software/preseq/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag preseq_lcextrap --symlink --wt 2

.github/workflows/pytest-workflow.yml (new file, 93 lines)

@ -0,0 +1,93 @@
name: Pytest-workflow
on: [push, pull_request]
jobs:
  changes:
    name: Check for changes
    runs-on: ubuntu-latest
    outputs:
      # Expose matched filters as job 'modules' output variable
      modules: ${{ steps.filter.outputs.changes }}
    steps:
      - uses: actions/checkout@v2
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: '.github/filters.yml'
  test:
    runs-on: ubuntu-20.04
    name: ${{ matrix.tags }} ${{ matrix.profile }} ${{ matrix.nxf_version }}
    needs: changes
    if: needs.changes.outputs.modules != '[]'
    strategy:
      fail-fast: false
      matrix:
        nxf_version: ['20.11.0-edge']
        tags: ['${{ fromJson(needs.changes.outputs.modules) }}']
        profile: ['docker', 'singularity'] ## 'conda'
    env:
      NXF_ANSI_LOG: false
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - uses: actions/cache@v2
        with:
          path: /usr/local/bin/nextflow
          key: ${{ runner.os }}-nextflow-${{ matrix.nxf_version }}
          restore-keys: |
            ${{ runner.os }}-nextflow-
      - name: Install Nextflow
        env:
          NXF_VER: ${{ matrix.nxf_version }}
        run: |
          wget -qO- get.nextflow.io | bash
          sudo mv nextflow /usr/local/bin/
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - name: Install dependencies
        run: python -m pip install --upgrade pip pytest-workflow
      - name: Set up Singularity
        if: matrix.profile == 'singularity'
        uses: eWaterCycle/setup-singularity@v5
        with:
          singularity-version: 3.7.1
      - name: Setup miniconda
        if: matrix.profile == 'conda'
        uses: conda-incubator/setup-miniconda@v2
        with:
          auto-update-conda: true
          channels: conda-forge,bioconda,defaults
          python-version: ${{ matrix.python-version }}
      - name: Conda clean
        if: matrix.profile == 'conda'
        run: conda clean -a
      # Test the module
      - name: Run pytest-workflow
        run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --wt 2 --kwdof
      - name: Upload logs on failure
        if: failure()
        uses: actions/upload-artifact@v2
        with:
          name: logs-${{ matrix.tags }}-${{ matrix.profile }}-${{ matrix.nxf_version }}
          path: |
            /home/runner/pytest_workflow_*/*/.nextflow.log
            /home/runner/pytest_workflow_*/*/log.out
            /home/runner/pytest_workflow_*/*/log.err
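The CI run step above can be reproduced locally for a single module. A minimal sketch, assuming Nextflow, Docker and Python are already installed, and using fastqc purely as an example tag taken from .github/filters.yml:

# Local reproduction of the "Run pytest-workflow" CI step above (a sketch, not a documented command).
# 'fastqc' is just an example tag; substitute any key defined in .github/filters.yml.
python -m pip install --upgrade pip pytest-workflow
TMPDIR=~ PROFILE=docker pytest --tag fastqc --symlink --wt 2 --kwdof

PROFILE mirrors the profile dimension of the CI matrix, so singularity can be substituted for docker in the same way.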

.github/workflows/quast.yml (deleted file)

@ -1,40 +0,0 @@
name: quast
on:
push:
paths:
- software/quast/**
- .github/workflows/quast.yml
- tests
pull_request:
paths:
- software/quast/**
- .github/workflows/quast.yml
- tests
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag quast --symlink --wt 2

.github/workflows/salmon_index.yml (deleted file)

@ -1,40 +0,0 @@
name: salmon_index
on:
push:
paths:
- software/salmon/index/**
- .github/workflows/salmon_index.yml
- tests/software/salmon/**
pull_request:
paths:
- software/salmon/index/**
- .github/workflows/salmon_index.yml
- tests/software/salmon/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag salmon_index --symlink --wt 2

.github/workflows/salmon_quant.yml (deleted file)

@ -1,40 +0,0 @@
name: salmon_quant
on:
push:
paths:
- software/salmon/quant/**
- .github/workflows/salmon_quant.yml
- tests/software/salmon/**
pull_request:
paths:
- software/salmon/quant/**
- .github/workflows/salmon_quant.yml
- tests/software/salmon/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag salmon_quant --symlink --wt 2

.github/workflows/samtools_flagstat.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_flagstat
on:
push:
paths:
- software/samtools/flagstat/**
- .github/workflows/samtools_flagstat.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/flagstat/**
- .github/workflows/samtools_flagstat.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_flagstat --symlink --wt 2

.github/workflows/samtools_idxstats.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_idxstats
on:
push:
paths:
- software/samtools/idxstats/**
- .github/workflows/samtools_idxstats.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/idxstats/**
- .github/workflows/samtools_idxstats.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_idxstats --symlink --wt 2

.github/workflows/samtools_index.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_index
on:
push:
paths:
- software/samtools/index/**
- .github/workflows/samtools_index.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/index/**
- .github/workflows/samtools_index.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_index --symlink --wt 2

.github/workflows/samtools_mpileup.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_mpileup
on:
push:
paths:
- software/samtools/mpileup/**
- .github/workflows/samtools_mpileup.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/mpileup/**
- .github/workflows/samtools_mpileup.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_mpileup --symlink --wt 2

.github/workflows/samtools_sort.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_sort
on:
push:
paths:
- software/samtools/sort/**
- .github/workflows/samtools_sort.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/sort/**
- .github/workflows/samtools_sort.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_sort --symlink --wt 2


@ -1,40 +0,0 @@
name: samtools_stats
on:
push:
paths:
- software/samtools/stats/**
- .github/workflows/software/samtools_stats.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/stats/**
- .github/workflows/software/samtools_stats.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_stats --symlink --wt 2

.github/workflows/samtools_view.yml (deleted file)

@ -1,40 +0,0 @@
name: samtools_view
on:
push:
paths:
- software/samtools/view/**
- .github/workflows/samtools_view.yml
- tests/software/samtools/**
pull_request:
paths:
- software/samtools/view/**
- .github/workflows/samtools_view.yml
- tests/software/samtools/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag samtools_view --symlink --wt 2

.github/workflows/seacr_callpeak.yml (deleted file)

@ -1,40 +0,0 @@
name: seacr_callpeak
on:
push:
paths:
- software/seacr/callpeak/**
- .github/workflows/seacr_callpeak.yml
- tests/software/seacr/**
pull_request:
paths:
- software/seacr/callpeak/**
- .github/workflows/seacr_callpeak.yml
- tests/software/seacr/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag seacr_callpeak --symlink --wt 2

.github/workflows/star_align.yml (deleted file)

@ -1,40 +0,0 @@
name: star_align
on:
push:
paths:
- software/star/align/**
- .github/workflows/star_align.yml
- tests/software/star/**
pull_request:
paths:
- software/star/align/**
- .github/workflows/star_align.yml
- tests/software/star/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag star_align --symlink --wt 2

.github/workflows/star_genomegenerate.yml (deleted file)

@ -1,40 +0,0 @@
name: star_genomegenerate
on:
push:
paths:
- software/star/genomegenerate/**
- .github/workflows/star_genomegenerate.yml
- tests/software/star/**
pull_request:
paths:
- software/star/genomegenerate/**
- .github/workflows/star_genomegenerate.yml
- tests/software/star/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag star_genomegenerate --symlink --wt 2

.github/workflows/stringtie.yml (deleted file)

@ -1,40 +0,0 @@
name: stringtie
on:
push:
paths:
- software/stringtie/**
- .github/workflows/stringtie.yml
- tests/software/stringtie/**
pull_request:
paths:
- software/stringtie/**
- .github/workflows/stringtie.yml
- tests/software/stringtie/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag stringtie --symlink --wt 2

.github/workflows/trimgalore.yml (deleted file)

@ -1,40 +0,0 @@
name: trimgalore
on:
push:
paths:
- software/trimgalore/**
- .github/workflows/trimgalore.yml
- tests/software/trimgalore/**
pull_request:
paths:
- software/trimgalore/**
- .github/workflows/trimgalore.yml
- tests/software/trimgalore/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag trimgalore --symlink --wt 2

.github/workflows/ucsc_bedgraphtobigwig.yml (deleted file)

@ -1,39 +0,0 @@
name: ucsc_bedgraphtobigwig
on:
push:
paths:
- software/ucsc/bedgraphtobigwig/**
- .github/workflows/ucsc_bedgraphtobigwig.yml
- tests/software/ucsc/**
pull_request:
paths:
- software/ucsc/bedgraphtobigwig/**
- .github/workflows/ucsc_bedgraphtobigwig.yml
- tests/software/ucsc/**
jobs:
ci_test:
runs-on: ubuntu-latest
strategy:
matrix:
nxf_version: [20.11.0-edge]
env:
NXF_ANSI_LOG: false
steps:
- uses: actions/checkout@v2
- name: Install Nextflow
env:
NXF_VER: ${{ matrix.nxf_version }}
run: |
wget -qO- get.nextflow.io | bash
sudo mv nextflow /usr/local/bin/
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: "3.x"
- name: Install dependencies
run: python -m pip install --upgrade pip pytest-workflow
# Test the module
- run: pytest --tag ucsc_bedgraphtobigwig --symlink --wt 2


@ -1,7 +0,0 @@
FROM nfcore/base
LABEL authors="Jose Espinosa-Carrasco" \
description="Docker image containing all requirements for nf-core/modules/bedtools/complement"
COPY environment.yml /
RUN conda env create -f /environment.yml && conda clean -a
ENV PATH /opt/conda/envs/nf-core-bedtools-complement/bin:$PATH


@ -1,9 +0,0 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-bedtools-complement
channels:
- conda-forge
- bioconda
- defaults
dependencies:
- bioconda::bedtools=2.29.2


@ -1,32 +0,0 @@
def MODULE = "bedtools_complement"
params.publish_dir = MODULE
params.publish_results = "default"
process BEDTOOLS_COMPLEMENT {
tag {input_file}
publishDir "${params.out_dir}/${params.publish_dir}",
mode: params.publish_dir_mode,
saveAs: { filename ->
if (params.publish_results == "none") null
else filename }
container "docker.pkg.github.com/nf-core/$MODULE"
conda "${moduleDir}/environment.yml"
input:
path (input_file)
path (fasta_sizes)
val (bedtools_complement_args)
output:
path "${input_file}.bed", emit: complement
path "*.version.txt", emit: version
script:
"""
bedtools complement -i ${input_file} -g ${fasta_sizes} ${bedtools_complement_args} > ${input_file}.bed
bedtools --version | sed -n "s/.*\\(v.*\$\\)/\\1/p" > bedtools.version.txt
"""
}


@ -1,30 +0,0 @@
name: bedtools complement
description: Returns all intervals in a genome that are not covered by at least one interval in the input BED/GFF/VCF file
keywords:
- complement
tools:
- bedtools:
description: |
Bedtools is a software package that provides a toolset to perform genome arithmetic operations.
homepage: https://bedtools.readthedocs.io/en/latest/index.html
documentation: https://bedtools.readthedocs.io/en/latest/index.html
doi: 10.1093/bioinformatics/btq033
input:
-
- input_file:
type: file
description: Input genomic coordinates file
pattern: "*.{bed,gff,vcf}"
- fasta_sizes:
type: file
description: Genome chromosome sizes
pattern: "*.{txt,sizes}"
output:
-
- index:
type: stdout,file
description:
pattern: "stdout,*.{bed,gff,vcf}"
authors:
- "@JoseEspinosa"


@ -1 +0,0 @@
../../../../../tests/data/bed/A.bed


@ -1 +0,0 @@
../../../../../tests/data/bed/genome.sizes


@ -1,19 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
params.out_dir = "test_output"
params.fastqc_args = ''
params.publish_dir_mode = "copy"
params.bedtools_complement_args = ''
include BEDTOOLS_COMPLEMENT from '../main.nf' params(params)
// Define input channels
ch_input = Channel.fromPath('./input_data/A.bed')
chrom_sizes = Channel.fromPath('./input_data/genome.sizes')
// Run the workflow
workflow {
BEDTOOLS_COMPLEMENT(ch_input, chrom_sizes, params.bedtools_complement_args)
}
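The deleted per-module harness above (and the similar ones for the other bedtools tools that follow) was not driven by pytest-workflow; it was a plain DSL2 script run from the module's own test directory. A minimal sketch of how such a test was presumably invoked, where the directory path is an assumption rather than something recorded in this diff:

# Hypothetical invocation of the old-style bedtools_complement test shown above.
# The test directory path is assumed; the nextflow.config with docker.enabled = true
# corresponds to the small config files that appear further down in this diff.
cd software/bedtools/complement/test
nextflow run main.nf -c nextflow.config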


@ -1,7 +0,0 @@
FROM nfcore/base
LABEL authors="Jose Espinosa-Carrasco" \
description="Docker image containing all requirements for nf-core/modules/bedtools/genomecov"
COPY environment.yml /
RUN conda env create -f /environment.yml && conda clean -a
ENV PATH /opt/conda/envs/nf-core-bedtools-genomecov/bin:$PATH


@ -1,9 +0,0 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-bedtools-genomecov
channels:
- conda-forge
- bioconda
- defaults
dependencies:
- bioconda::bedtools=2.29.2


@ -1,32 +0,0 @@
def MODULE = "bedtools_genomecov"
params.publish_dir = MODULE
params.publish_results = "default"
process BEDTOOLS_GENOMECOV {
tag {bam}
publishDir "${params.out_dir}/${params.publish_dir}",
mode: params.publish_dir_mode,
saveAs: { filename ->
if (params.publish_results == "none") null
else filename }
container "docker.pkg.github.com/nf-core/$MODULE"
conda "${moduleDir}/environment.yml"
input:
path (bam)
path (chrom_sizes)
val (bedtools_genomecov_args)
output:
path "${bam}.bed", emit: coverage
path "*.version.txt", emit: version
script:
"""
bedtools genomecov -ibam ${bam} -g ${chrom_sizes} ${bedtools_genomecov_args} > ${bam}.bed
bedtools --version | sed -n "s/.*\\(v.*\$\\)/\\1/p" > bedtools.version.txt
"""
}


@ -1,30 +0,0 @@
name: bedtools genomecov
description: Returns feature coverage for a given genome in different formats
keywords:
- genomecov
tools:
- bedtools:
description: |
Bedtools is a software package that provides a toolset to perform genome arithmetic operations.
homepage: https://bedtools.readthedocs.io/en/latest/index.html
documentation: https://bedtools.readthedocs.io/en/latest/index.html
doi: 10.1093/bioinformatics/btq033
input:
-
- input_file:
type: file
description: Input genomic coordinates file
pattern: "*.{bam}"
- chrom_sizes:
type: file
description: Genome chromosome sizes
pattern: "*.{txt,sizes}"
output:
-
- index:
type: stdout,file
description:
pattern: "stdout,*.{bed,bedGraph}"
authors:
- "@JoseEspinosa"


@ -1 +0,0 @@
../../../../../tests/data/bam/JK2067_downsampled_s0.1.bam


@ -1 +0,0 @@
../../../../../tests/data/bed/genome.sizes


@ -1,19 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
params.out_dir = "test_output"
params.fastqc_args = ''
params.publish_dir_mode = "copy"
params.bedtools_genomecov_args = '' //'-bg'
include BEDTOOLS_GENOMECOV from '../main.nf' params(params)
// Define input channels
ch_input = Channel.fromPath('./input_data/JK2067_downsampled_s0.1.bam')
chrom_sizes = Channel.fromPath('./input_data/genome.sizes')
// Run the workflow
workflow {
BEDTOOLS_GENOMECOV(ch_input, chrom_sizes, params.bedtools_genomecov_args)
}


@ -1,7 +0,0 @@
FROM nfcore/base
LABEL authors="Jose Espinosa-Carrasco" \
description="Docker image containing all requirements for nf-core/modules/bedtools/intersect"
COPY environment.yml /
RUN conda env create -f /environment.yml && conda clean -a
ENV PATH /opt/conda/envs/nf-core-bedtools-intersectbed/bin:$PATH


@ -1,9 +0,0 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-bedtools-intersect
channels:
- conda-forge
- bioconda
- defaults
dependencies:
- bioconda::bedtools=2.29.2


@ -1,36 +0,0 @@
def MODULE = "bedtools_intersect"
params.publish_dir = MODULE
params.publish_results = "default"
process INTERSECT_BED {
tag "$input_file_1-$input_file_2"
publishDir "${params.out_dir}/${params.publish_dir}",
mode: params.publish_dir_mode,
saveAs: { filename ->
if (params.publish_results == "none") null
else filename }
container "docker.pkg.github.com/nf-core/$MODULE"
conda "${moduleDir}/environment.yml"
input:
path (input_file_1)
path (input_file_2)
val (intersectbed_args)
output:
path "${input_file_1.baseName}_i_${input_file_2.baseName}.bed", emit: intersect
path "*.version.txt", emit: version
script:
def params_string = intersectbed_args.collect {
/-$it.key $it.value/
} join " "
"""
bedtools intersect -a ${input_file_1} -b ${input_file_2} ${params_string} > ${input_file_1.baseName}_i_${input_file_2.baseName}.bed
bedtools --version | sed -n "s/.*\\(v.*\$\\)/\\1/p" > bedtools.version.txt
"""
}


@ -1,30 +0,0 @@
name: bedtools intersect
description: Returns the overlapping features between two sets of genomics features
keywords:
- bedtools intersect
tools:
- bedtools:
description: |
Bedtools is a software package that provides a toolset to perform genome arithmetic operations.
homepage: https://bedtools.readthedocs.io/en/latest/index.html
documentation: https://bedtools.readthedocs.io/en/latest/index.html
doi: 10.1093/bioinformatics/btq033
input:
-
- input_file_1:
type: file
description: Input genomic coordinates file
pattern: "*.{bam,bed,BED,gff,vcf}"
- input_file_2:
type: file
description: Input genomic coordinates file
pattern: "*.{bam,bed,BED,gff,vcf}"
output:
-
- index:
type: stdout,file
description:
pattern: "stdout,*.{bed,BED}"
authors:
- "@JoseEspinosa"


@ -1 +0,0 @@
../../../../../tests/data/bed/A.bed


@ -1 +0,0 @@
../../../../../tests/data/bed/B.bed


@ -1,24 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
params.out_dir = "test_output"
params.fastqc_args = ''
params.publish_dir_mode = "copy"
params.intersect_args = '' //'-bed -c -f 0.20'
include check_output from '../../../../tests/functions/check_process_outputs.nf' // params(params)
include INTERSECT_BED from '../main.nf' params(params)
// Define input channels
ch_input_1 = Channel.fromPath('./input_data/A.bed')
ch_input_2 = Channel.fromPath('./input_data/B.bed')
def additional_params_map = [:]
additional_params_map = [ s: "",
f: 0.9 ]
// Run the workflow
workflow {
INTERSECT_BED(ch_input_1, ch_input_2, additional_params_map)
}


@ -1,7 +0,0 @@
FROM nfcore/base
LABEL authors="Jose Espinosa-Carrasco" \
description="Docker image containing all requirements for nf-core/modules/bedtools/complementbed"
COPY environment.yml /
RUN conda env create -f /environment.yml && conda clean -a
ENV PATH /opt/conda/envs/nf-core-bedtools-merge/bin:$PATH


@ -1,9 +0,0 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-bedtools-merge
channels:
- conda-forge
- bioconda
- defaults
dependencies:
- bioconda::bedtools=2.29.2


@ -1,31 +0,0 @@
def MODULE = "bedtools_merge"
params.publish_dir = MODULE
params.publish_results = "default"
process BEDTOOLS_MERGE {
tag { input_file }
publishDir "${params.out_dir}/${params.publish_dir}",
mode: params.publish_dir_mode,
saveAs: { filename ->
if (params.publish_results == "none") null
else filename }
container "docker.pkg.github.com/nf-core/$MODULE"
conda "${moduleDir}/environment.yml"
input:
path (input_file)
val (bedtools_merge_args)
output:
path "${input_file}.bed", emit: merge
path "*.version.txt", emit: version
script:
"""
bedtools merge -i ${input_file} ${bedtools_merge_args} > ${input_file}.bed
bedtools --version | sed -n "s/.*\\(v.*\$\\)/\\1/p" > bedtools.version.txt
"""
}


@ -1,26 +0,0 @@
name: bedtools merge
description: Combines overlapping genome features of a single file
keywords:
- merge
tools:
- bedtools:
description: |
Bedtools is a software package that provides a toolset to perform genome arithmetic operations.
homepage: https://bedtools.readthedocs.io/en/latest/index.html
documentation: https://bedtools.readthedocs.io/en/latest/index.html
doi: 10.1093/bioinformatics/btq033
input:
-
- input_file:
type: file
description: Input genomic coordinates file
pattern: "*.{bed,gff,vcf,bam}"
output:
-
- index:
type: stdout,file
description:
pattern: "stdout,*.{bed}"
authors:
- "@JoseEspinosa"


@ -1 +0,0 @@
../../../../../tests/data/bed/A.bed


@ -1 +0,0 @@
../../../../../tests/data/bam/JK2067_downsampled_s0.1.bam


@ -1,19 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
params.out_dir = "test_output"
params.fastqc_args = ''
params.publish_dir_mode = "copy"
params.bedtools_merge_args = '' //''-s -c 6 -o distinct'
include BEDTOOLS_MERGE from '../main.nf' params(params)
// Define input channels
ch_input = Channel.fromPath('./input_data/A.bed')
//ch_input = Channel.fromPath('./input_data/JK2067_downsampled_s0.1.bam')
// Run the workflow
workflow {
BEDTOOLS_MERGE(ch_input, params.bedtools_merge_args)
}


@ -1,2 +0,0 @@
docker.enabled = true
params.outdir = './results'


@ -1,9 +0,0 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-bedtools-sort
channels:
- conda-forge
- bioconda
- defaults
dependencies:
- bioconda::bedtools=2.29.2


@ -1,31 +0,0 @@
def MODULE = "bedtools_sort"
params.publish_dir = MODULE
params.publish_results = "default"
process BEDTOOLS_SORT {
tag { input_file }
publishDir "${params.out_dir}/${params.publish_dir}",
mode: params.publish_dir_mode,
saveAs: { filename ->
if (params.publish_results == "none") null
else filename }
container "docker.pkg.github.com/nf-core/$MODULE"
conda "${moduleDir}/environment.yml"
input:
path (input_file)
val (bedtools_sort_args)
output:
path "${input_file}.bed", emit: sort
path "*.version.txt", emit: version
script:
"""
bedtools sort -i ${input_file} ${bedtools_sort_args} > ${input_file}.bed
bedtools --version | sed -n "s/.*\\(v.*\$\\)/\\1/p" > bedtools.version.txt
"""
}


@ -1,26 +0,0 @@
name: bedtools sort
description: Returns a sorted feature file by chromosome and other criteria
keywords:
- sort
tools:
- bedtools:
description: |
Bedtools is a software package that provides a toolset to perform genome arithmetic operations.
homepage: https://bedtools.readthedocs.io/en/latest/index.html
documentation: https://bedtools.readthedocs.io/en/latest/index.html
doi: 10.1093/bioinformatics/btq033
input:
-
- input_file:
type: file
description: Input genomic coordinates file
pattern: "*.{bed,gff,vcf}"
output:
-
- index:
type: stdout,file
description:
pattern: "stdout,*.{bed,gff,vcf}"
authors:
- "@JoseEspinosa"


@ -1 +0,0 @@
../../../../../tests/data/bed/A.bed


@ -1,18 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
params.out_dir = "test_output"
params.fastqc_args = ''
params.publish_dir_mode = "copy"
params.bedtools_sort_args = '' //'-sizeD'
include BEDTOOLS_SORT from '../main.nf' params(params)
// Define input channels
ch_input = Channel.fromPath('./input_data/A.bed')
// Run the workflow
workflow {
BEDTOOLS_SORT(ch_input, params.bedtools_sort_args)
}


@ -1,52 +0,0 @@
nextflow.preview.dsl=2
params.genome = ''
process BOWTIE2 {
// depending on the genome used one might want/need to adjust the memory settings.
// For the E. coli test data this is probably not required
// label 'bigMem'
// label 'multiCore'
publishDir "$outdir/bowtie2",
mode: "copy", overwrite: true
input:
tuple val(name), path(reads)
val (outdir)
val (bowtie2_args)
val (verbose)
output:
path "*bam", emit: bam
path "*stats.txt", emit: stats
script:
if (verbose){
println ("[MODULE] BOWTIE2 ARGS: " + bowtie2_args)
}
cores = 4
readString = ""
// Options we add are
bowtie2_options = bowtie2_args
bowtie2_options += " --no-unal " // We don't need unaligned reads in the BAM file
// single-end / paired-end distinction. Might also be handled via params.single_end
if (reads instanceof List) {
readString = "-1 " + reads[0] + " -2 " + reads[1]
}
else {
readString = "-U " + reads
}
index = params.genome["bowtie2"]
bowtie2_name = name + "_" + params.genome["name"]
"""
bowtie2 -x ${index} -p ${cores} ${bowtie2_options} ${readString} 2>${bowtie2_name}_bowtie2_stats.txt | samtools view -bS -F 4 -F 8 -F 256 -> ${bowtie2_name}_bowtie2.bam
"""
}


@ -1,37 +0,0 @@
name: Bowtie 2
description: Ultrafast alignment to reference genome
keywords:
- Alignment
- Short reads
- FM Index
tools:
- bowtie2:
description: |
Bowtie 2 is an ultrafast and memory-efficient tool for aligning sequencing reads
to long reference sequences. It is particularly good at aligning reads of about
50 up to 100s or 1,000s of characters, and particularly good at aligning to relatively
long (e.g. mammalian) genomes. Bowtie 2 indexes the genome with an FM Index to keep
its memory footprint small: for the human genome, its memory footprint is typically
around 3.2 GB. Bowtie 2 supports gapped, local, and paired-end alignment modes.
homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml
documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml
input:
-
- sample_id:
type: string
description: Sample identifier
- reads:
type: file
description: Input FastQ file, or pair of files
output:
-
- report:
type: file
description: mapping statistics report
pattern: "*bowtie2_stats.txt"
- alignment:
type: file
description: alignment file in BAM format
pattern: "*bowtie2.bam"
authors:
- "@FelixKrueger"


@ -1 +0,0 @@
../../../../../tests/data/fasta/E_coli/NC_010473.fa


@ -1 +0,0 @@
../../../../tests/data/fastq/dna/Ecoli_DNA_R1.fastq.gz


@ -1 +0,0 @@
../../../../tests/data/fastq/dna/Ecoli_DNA_R2.fastq.gz


@ -1 +0,0 @@
../../../../tests/data/fastq/rna/test_R1_val_1.fq.gz


@ -1 +0,0 @@
../../../../tests/data/fastq/rna/test_R2_val_2.fq.gz


@ -1,31 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl=2
params.outdir = "."
params.genome = ""
params.bowtie2_args = ''
// Bowtie2 arguments should be supplied in the following format to work:
// --bowtie2_args="--score-min L,0,-0.8"
params.verbose = false
if (params.verbose){
println ("[WORKFLOW] BOWTIE2 ARGS: " + params.bowtie2_args)
}
// for other genomes this needs to be handled somehow to return all possible genomes
genomeValues = ["name" : params.genome]
genomeValues["bowtie2"] = "/bi/home/fkrueger/VersionControl/nf-core-modules/test-datasets/indices/bowtie2/E_coli/${params.genome}";
include '../main.nf' params(genome: genomeValues)
ch_read_files = Channel
.fromFilePairs('../../../test-datasets/Ecoli*{1,2}.fastq.gz',size:-1)
// .view() // to check whether the input channel works
workflow {
main:
BOWTIE2(ch_read_files, params.outdir, params.bowtie2_args, params.verbose)
}
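Extra aligner options have to reach the module as a single quoted string via params.bowtie2_args, as the comment in the test script notes. A sketch of a local run, where the genome label is a hypothetical placeholder (the script appends it to a hard-coded E. coli index path on the original author's machine) and a local test-datasets checkout is assumed:

# Hypothetical local run of the bowtie2 test workflow above.
# 'E_coli_example' is a placeholder genome label, not a value taken from this diff;
# the test script appends it to its hard-coded bowtie2 index path.
nextflow run main.nf --genome E_coli_example --bowtie2_args="--score-min L,0,-0.8" --verbose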


@ -1,2 +0,0 @@
docker.enabled = true
params.outdir = './results'


@ -1,15 +0,0 @@
10000 reads; of these:
10000 (100.00%) were paired; of these:
893 (8.93%) aligned concordantly 0 times
8474 (84.74%) aligned concordantly exactly 1 time
633 (6.33%) aligned concordantly >1 times
----
893 pairs aligned concordantly 0 times; of these:
815 (91.27%) aligned discordantly 1 time
----
78 pairs aligned 0 times concordantly or discordantly; of these:
156 mates make up the pairs; of these:
0 (0.00%) aligned 0 times
1 (0.64%) aligned exactly 1 time
155 (99.36%) aligned >1 times
100.00% overall alignment rate


@ -1,15 +0,0 @@
9979 reads; of these:
9979 (100.00%) were paired; of these:
3584 (35.92%) aligned concordantly 0 times
3705 (37.13%) aligned concordantly exactly 1 time
2690 (26.96%) aligned concordantly >1 times
----
3584 pairs aligned concordantly 0 times; of these:
886 (24.72%) aligned discordantly 1 time
----
2698 pairs aligned 0 times concordantly or discordantly; of these:
5396 mates make up the pairs; of these:
2282 (42.29%) aligned 0 times
1467 (27.19%) aligned exactly 1 time
1647 (30.52%) aligned >1 times
88.57% overall alignment rate


@ -1,41 +0,0 @@
process cutadapt {
tag "${sample_id}"
container 'quay.io/biocontainers/cutadapt:1.16--py27_1'
input:
tuple val(sample_id), path(reads)
output:
tuple sample_id, path("trimmed_*.fastq")
script:
forward_fq = "trimmed_1.fastq"
reverse_fq = "trimmed_2.fastq"
if (params.single_end) {
processing = """
cutadapt \
-j ${task.cpus} \
-q $params.cutadapt_min_quality \
--minimum-length $params.cutadapt_min_length \
--output ${forward_fq} \
${reads}
"""
} else {
processing = """
cutadapt \
-j ${task.cpus} \
-q $params.cutadapt_min_quality \
--minimum-length $params.cutadapt_min_length \
--pair-filter=any \
--output ${forward_fq} \
--paired-output ${reverse_fq} ${reads}
"""
}
version = "cutadapt --version &> v_cutadapt.txt"
return processing + version
}


@ -1,36 +0,0 @@
name: Cutadapt
description: cutadapt removes adapter sequences from high-throughput sequencing reads
keywords:
- Quality Control
- QC
- Adapters
tools:
- cutadapt:
description: |
Cutadapt finds and removes adapter sequences, primers, poly-A tails and other types of unwanted sequence
from your high-throughput sequencing reads.
Cleaning your data in this way is often required: reads from small-RNA sequencing contain the 3'
sequencing adapter because the read is longer than the molecule that is sequenced. Amplicon reads
start with a primer sequence. Poly-A tails are useful for pulling out RNA from your sample, but
often you don't want them to be in your reads.
homepage: https://cutadapt.readthedocs.io/en/stable/
documentation: https://cutadapt.readthedocs.io/en/stable/
input:
-
- sample_id:
type: string
description: Sample identifier
- reads:
type: file
description: Input FastQ file, or pair of files
output:
-
- sample_id:
type: string
description: Sample identifier
- reads:
type: file
description: trimmed FastQ file, or pair of files
authors:
- "@piotr-faba-ardigen"


@ -1,20 +0,0 @@
#!/usr/bin/env nextflow
nextflow.preview.dsl = 2
include '../main.nf' params(params)
// Define input channels
input_fastqs = Channel.fromFilePairs('../../../test-datasets/tools/cutadapt/input/*_{1,2}.fastq' )
if(params.single_end){
input_fastqs = Channel.from([
['SRR4238351', '../../../test-datasets/tools/cutadapt/input/SRR4238351_subsamp.fastq.gz'],
['SRR4238355', '../../../test-datasets/tools/cutadapt/input/SRR4238355_subsamp.fastq.gz'],
['SRR4238359', '../../../test-datasets/tools/cutadapt/input/SRR4238359_subsamp.fastq.gz'],
['SRR4238379', '../../../test-datasets/tools/cutadapt/input/SRR4238379_subsamp.fastq.gz']
]).map { row -> [ row[0], [ file(row[1]) ] ] }
}
// Run the workflow
workflow {
cutadapt(input_fastqs)
}


@ -1,9 +0,0 @@
docker.enabled = true
params.outdir = './results'
params{
// Preprocessing options
cutadapt_min_length = 40
cutadapt_min_quality = 25
single_end = false
}
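The params block above only sets defaults for the cutadapt test; they can be overridden on the command line when the test script is run, as sketched below (running from the test directory with the test-datasets checkout in place is an assumption):

# Hypothetical override of the cutadapt test defaults defined in the config above.
# --single_end switches the test to the single-end input channel defined in the test main.nf.
nextflow run main.nf -c nextflow.config --single_end --cutadapt_min_length 20 --cutadapt_min_quality 30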


@ -1,58 +0,0 @@
nextflow.preview.dsl=2
params.genome = ''
process HISAT2 {
// depending on the genome used one might want/need to adjust the memory settings.
// For the E. coli test data this is probably not required
// label 'bigMem'
// label 'multiCore'
publishDir "$outdir/hisat2",
mode: "copy", overwrite: true
input:
tuple val(name), path(reads)
val outdir
val hisat2_args
val verbose
output:
path "*bam", emit: bam
path "*stats.txt", emit: stats
script:
if (verbose){
println ("[MODULE] HISAT2 ARGS: " + hisat2_args)
}
cores = 4
readString = ""
hisat_options = hisat2_args
// Options we add are
hisat_options = hisat_options + " --no-unal --no-softclip "
if (reads instanceof List) {
readString = "-1 "+reads[0]+" -2 "+reads[1]
hisat_options = hisat_options + " --no-mixed --no-discordant"
}
else {
readString = "-U "+reads
}
index = params.genome["hisat2"]
splices = ''
if (params.genome.containsKey("hisat2_splices")){
splices = " --known-splicesite-infile " + params.genome["hisat2_splices"]
}
else{
println ("No key 'hisat2_splices' was supplied. Skipping...")
}
hisat_name = name + "_" + params.genome["name"]
"""
hisat2 -p ${cores} ${hisat_options} -x ${index} ${splices} ${readString} 2>${hisat_name}_hisat2_stats.txt | samtools view -bS -F 4 -F 8 -F 256 -> ${hisat_name}_hisat2.bam
"""
}


@ -1,37 +0,0 @@
name: HISAT2
description: Graph-based alignment of next generation sequencing reads to a population of genomes
keywords:
- Alignment
- Short reads
- graph FM Index (GFM)
- RNA-seq
tools:
- hisat2:
description: |
HISAT2 is a fast and sensitive alignment program for mapping next-generation
sequencing reads (whole-genome, transcriptome, and exome sequencing data)
against the general human population (as well as against a single reference genome).
Based on GCSA (an extension of BWT for a graph) it is designed and implemented as a
graph FM index (GFM).
homepage: http://daehwankimlab.github.io/hisat2/
documentation: https://ccb.jhu.edu/software/hisat2/manual.shtml
input:
-
- sample_id:
type: string
description: Sample identifier
- reads:
type: file
description: Input FastQ file, or pair of files
output:
-
- report:
type: file
description: mapping statistics report
pattern: "*hisat2_stats.txt"
- alignment:
type: file
description: alignment file in BAM format
pattern: "*hisat2.bam"
authors:
- "@FelixKrueger"

Some files were not shown because too many files have changed in this diff Show more