Mirror of https://github.com/MillironX/nf-configs.git (synced 2024-11-21 16:16:04 +00:00)

Commit 30e058abf0: Merge branch 'master' into master_kraken

36 changed files with 627 additions and 543 deletions
.github/workflows/main.yml (vendored) — 43 changed lines
@@ -16,7 +16,48 @@ jobs:
     needs: test_all_profiles
     strategy:
       matrix:
-        profile: ['abims', 'awsbatch', 'bi','bigpurple', 'binac', 'biohpc_gen', 'cbe', 'ccga_dx', 'ccga_med', 'cfc', 'cfc_dev', 'crick', 'denbi_qbic', 'ebc', 'eddie', 'eva', 'genotoul', 'genouest', 'gis', 'google', 'hebbe', 'icr_davros', 'ifb_core', 'imperial', 'imperial_mb', 'jax', 'mpcdf', 'munin', 'oist', 'pasteur', 'phoenix', 'prince', 'seg_globe', 'shh', 'uct_hpc', 'uppmax', 'utd_ganymede', 'uzh']
+        profile:
+          - 'abims'
+          - 'awsbatch'
+          - 'aws_tower'
+          - 'bi'
+          - 'bigpurple'
+          - 'binac'
+          - 'biohpc_gen'
+          - 'cbe'
+          - 'ccga_dx'
+          - 'ccga_med'
+          - 'cfc'
+          - 'cfc_dev'
+          - 'crick'
+          - 'denbi_qbic'
+          - 'ebc'
+          - 'eddie'
+          - 'eva'
+          - 'genotoul'
+          - 'genouest'
+          - 'gis'
+          - 'google'
+          - 'hebbe'
+          - 'icr_davros'
+          - 'ifb_core'
+          - 'imperial'
+          - 'imperial_mb'
+          - 'jax'
+          - 'mpcdf'
+          - 'munin'
+          - 'oist'
+          - 'pasteur'
+          - 'phoenix'
+          - 'prince'
+          - 'sanger'
+          - 'seg_globe'
+          - 'shh'
+          - 'uct_hpc'
+          - 'uppmax'
+          - 'utd_ganymede'
+          - 'utd_sysbio'
+          - 'uzh'
     steps:
       - uses: actions/checkout@v1
       - name: Install Nextflow
@@ -96,10 +96,12 @@ Currently documentation is available for the following systems:

 * [ABIMS](docs/abims.md)
 * [AWSBATCH](docs/awsbatch.md)
+* [AWS_TOWER](docs/aws_tower.md)
 * [BIGPURPLE](docs/bigpurple.md)
 * [BI](docs/bi.md)
 * [BINAC](docs/binac.md)
 * [BIOHPC_GEN](docs/biohpc_gen.md)
+* [CAMBRIDGE](docs/cambridge.md)
 * [CBE](docs/cbe.md)
 * [CCGA_DX](docs/ccga_dx.md)
 * [CCGA_MED](docs/ccga_med.md)
@@ -122,11 +124,13 @@ Currently documentation is available for the following systems:
 * [PASTEUR](docs/pasteur.md)
 * [PHOENIX](docs/phoenix.md)
 * [PRINCE](docs/prince.md)
+* [SANGER](docs/sanger.md)
 * [SEG_GLOBE](docs/seg_globe.md)
 * [SHH](docs/shh.md)
 * [UCT_HPC](docs/uct_hpc.md)
 * [UPPMAX](docs/uppmax.md)
 * [UTD_GANYMEDE](docs/utd_ganymede.md)
+* [UTD_SYSBIO](docs/utd_sysbio.md)
 * [UZH](docs/uzh.md)

 ### Uploading to `nf-core/configs`
(Deleted binary image file — not shown. Before: 57 KiB)
@@ -1,450 +0,0 @@
(Deleted: social_preview_image_configs.svg — the 450-line Inkscape SVG source for the repository's 1280×640 social preview image, containing the nf-core "configs" logo and the caption "Config files used to define parameters specific to compute environments at different Institutions". Full SVG markup omitted. Before: 26 KiB)
@@ -10,6 +10,7 @@ import os
 import sys
 import argparse
 import re
+import yaml

 ############################################
 ############################################
@@ -51,19 +52,22 @@ def check_config(Config, Github):
     ### Ignore these profiles
     ignore_me = ['czbiohub_aws']
     tests.update(ignore_me)
-    with open(Github, 'r') as ghfile:
-        for line in ghfile:
-            if re.search('profile: ', line):
-                line = line.replace('\'','').replace('[','').replace(']','').replace('\n','')
-                profiles = line.split(':')[1].split(',')
-                for p in profiles:
-                    tests.add(p.strip())
+    # parse yaml GitHub actions file
+    try:
+        with open(Github, 'r') as ghfile:
+            wf = yaml.safe_load(ghfile)
+            profile_list = wf["jobs"]["profile_test"]["strategy"]["matrix"]["profile"]
+    except Exception as e:
+        print("Could not parse yaml file: {}, {}".format(Github, e))
+        sys.exit(1)
+    # Add profiles to test
+    for profile in profile_list:
+        tests.add(profile.strip())

     ###Check if sets are equal
     if tests == config_profiles:
         sys.exit(0)
     else:
         #Maybe report what is missing here too
         try:
             assert tests == config_profiles
         except (AssertionError):
             print("Tests don't seem to test these profiles properly. Please check whether you added the profile to the Github Actions testing YAML.\n")
             print(config_profiles.symmetric_difference(tests))
             sys.exit(1)
conf/aws_tower.config — new file, 21 lines
@@ -0,0 +1,21 @@
+//Nextflow config file for running on AWS batch
+params {
+  config_profile_description = 'AWS Batch with Tower Profile'
+  config_profile_contact = 'Gisela Gabernet (@ggabernet)'
+  config_profile_url = 'https://aws.amazon.com/batch/'
+}
+
+timeline {
+  overwrite = true
+}
+report {
+  overwrite = true
+}
+trace {
+  overwrite = true
+}
+dag {
+  overwrite = true
+}
+
+process.executor = 'awsbatch'
@@ -12,7 +12,7 @@ env {
 process {
   executor = 'slurm'
   queue = { task.memory <= 1536.GB ? (task.time > 2.d || task.memory > 384.GB ? 'biohpc_gen_production' : 'biohpc_gen_normal') : 'biohpc_gen_highmem' }
-  beforeScript = 'module use /dss/dssfs01/pr53da/pr53da-dss-0000/spack/modules/x86_avx2/linux*'
+  beforeScript = 'module use /dss/dsslegfs02/pn73se/pn73se-dss-0000/spack/modules/x86_avx2/linux*'
   module = 'charliecloud/0.22:miniconda3'
 }

conf/cambridge.config — new file, 18 lines
|
|||
params {
|
||||
config_profile_description = 'Cambridge HPC cluster profile.'
|
||||
config_profile_contact = 'Andries van Tonder (ajv37@cam.ac.uk)'
|
||||
config_profile_url = "https://docs.hpc.cam.ac.uk/hpc"
|
||||
}
|
||||
singularity {
|
||||
enabled = true
|
||||
autoMounts = true
|
||||
}
|
||||
process {
|
||||
executor = 'slurm'
|
||||
clusterOptions = '-p cclake'
|
||||
}
|
||||
params {
|
||||
max_memory = 192.GB
|
||||
max_cpus = 56
|
||||
max_time = 12.h
|
||||
}
|
|
@@ -11,7 +11,6 @@ singularity {
 }
-
 process {
   beforeScript = 'module load devel/singularity/3.4.2'
   executor = 'slurm'
   queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
   scratch = 'true'
@@ -10,7 +10,6 @@ singularity {
 }
-
 process {
   beforeScript = 'module load devel/singularity/3.4.2'
   executor = 'slurm'
   queue = { task.memory > 60.GB || task.cpus > 20 ? 'qbic' : 'compute' }
   scratch = 'true'
@@ -17,7 +17,7 @@ process {
 params {
   max_memory = 224.GB
   max_cpus = 32
-  max_time = 72.h
+  max_time = '72.h'

   igenomes_base = '/camp/svc/reference/Genomics/aws-igenomes'
 }
@@ -27,7 +27,7 @@ process {
 workDir = "s3://czb-nextflow/intermediates/"

 aws.region = 'us-west-2'
-executor.awscli = '/home/ec2-user/miniconda/bin/aws'
+aws.batch.cliPath = '/home/ec2-user/miniconda/bin/aws'
 params.tracedir = './'

 params {
@@ -31,7 +31,7 @@ process {
 params {
   saveReference = true
   // iGenomes reference base
-  igenomes_base = '/exports/igmm/eddie/NextGenResources/igenomes'
+  igenomes_base = '/exports/igmm/eddie/BioinformaticsResources/igenomes'
   max_memory = 384.GB
   max_cpus = 32
   max_time = 240.h
@@ -42,8 +42,8 @@ env {
 }

 singularity {
-  envWhitelist = "SINGULARITY_TMPDIR"
-  runOptions = '-p'
+  envWhitelist = "SINGULARITY_TMPDIR,TMPDIR"
+  runOptions = '-p -B "$TMPDIR"'
   enabled = true
   autoMounts = true
 }
@@ -14,7 +14,7 @@ process {
   clusterOptions = { "-A $params.project ${params.clusterOptions ?: ''}" }

   /* The Hebbe scheduler fails if you try to request an amount of memory for a job */
-  withName: '*' { memory = null }
+  withName: '.*' { memory = null }
 }

 params {
@@ -27,7 +27,7 @@ executor {
 singularity {
   enabled = true
   autoMounts = true
-  runOptions = "-B /rds/,/rdsgpfs/,/rds/general/user/$USER/ephemeral/tmp/:/tmp,/rds/general/user/$USER/ephemeral/tmp/:/var/tmp"
+  runOptions = "-B /rds/,/rds/general/user/$USER/ephemeral/tmp/:/tmp,/rds/general/user/$USER/ephemeral/tmp/:/var/tmp"
 }

 process {
@@ -19,7 +19,7 @@ singularity{
   cacheDir = params.singularity_cache_dir
 }
 params {
-  max_memory = 768.GB
-  max_cpus = 70
+  max_memory = 320.GB
+  max_cpus = 32
   max_time = 336.h
 }
@@ -1,24 +1,28 @@
 params {
   config_profile_description = 'MPCDF HPC profiles (unoffically) provided by nf-core/configs.'
   config_profile_contact = 'James Fellows Yates (@jfy133)'
-  config_profile_url = 'https://www.mpcdf.mpg.de/services/computing'
+  config_profile_url = 'https://www.mpcdf.mpg.de/services/supercomputing'
 }


 profiles {
   cobra {
-    // Does not have singularity! Conda module must be used, but it is
-    // recommended to set NXF_CONDA_CACHEDIR var in ~/.bash{_profile,rc}
-    // To create common cache dir

     process {
-      beforeScript = 'module load anaconda/3/2020.02'
+      beforeScript = 'module load singularity'
       executor = 'slurm'
     }

     executor {
       queueSize = 8
     }

+    // Set $NXF_SINGULARITY_CACHEDIR in your ~/.bash_profile
+    // to stop downloading the same image for every run
+    singularity {
+      enabled = true
+      autoMounts = true
+    }
+
     params {
       config_profile_description = 'MPCDF cobra profile (unofficially) provided by nf-core/configs.'
@@ -28,18 +32,22 @@ profiles {
     }
   }
   raven {
-    // Does not have singularity! Conda module must be used, but it is
-    // recommended to set NXF_CONDA_CACHEDIR var in ~/.bash{_profile,rc}
-    // to create common cache dir

     process {
-      beforeScript = 'module load anaconda/3/2020.02'
+      beforeScript = 'module load singularity'
       executor = 'slurm'
     }

     executor {
       queueSize = 8
     }

+    // Set $NXF_SINGULARITY_CACHEDIR in your ~/.bash_profile
+    // to stop downloading the same image for every run
+    singularity {
+      enabled = true
+      autoMounts = true
+    }
+
     params {
       config_profile_description = 'MPCDF raven profile (unofficially) provided by nf-core/configs.'
@@ -47,5 +55,8 @@ profiles {
       max_cpus = 192
       max_time = 24.h
     }
   }
+  debug {
+    cleanup = false
+  }
 }
@@ -2,8 +2,8 @@

 params {
   // Specific nf-core/configs params
-  config_profile_contact = 'Szilveszter Juhos (@szilvajuhos)'
-  config_profile_description = 'MUNIN profile provided by nf-core/configs.'
+  config_profile_contact = 'Maxime Garcia (@maxulysse)'
+  config_profile_description = 'MUNIN profile provided by nf-core/configs'
   config_profile_url = 'https://ki.se/forskning/barntumorbanken'

   // Local AWS iGenomes reference file paths on munin
@@ -26,8 +26,12 @@ singularity {
 }

 // To use docker, use nextflow run -profile munin,docker
-docker {
-  enabled = false
-  mountFlags = 'z'
-  fixOwnership = true
+profiles {
+  docker {
+    docker {
+      enabled = false
+      mountFlags = 'z'
+      fixOwnership = true
+    }
+  }
 }
@@ -61,7 +61,7 @@ process {
   // Fixes for SGE and Java incompatibility due to Java using more memory than you tell it to use

   withName: makeSeqDict {
-    clusterOptions = { "-S /bin/bash -v JAVA_OPTS='-XX:ParallelGCThreads=1' -l h_vmem=${(task.memory.toGiga() + 3)}G,virtual_free=${(task.memory.toGiga() + 3)}G" }
+    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
   }

   withName: fastqc {
@@ -69,7 +69,7 @@ process {
   }

   withName: adapter_removal {
-    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
+    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 4)}G,virtual_free=${(task.memory.toGiga() * 4)}G" }
   }

   withName: dedup {
@@ -79,6 +79,10 @@ process {
   withName: markduplicates {
     clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() + 6)}G,virtual_free=${(task.memory.toGiga() + 6)}G" }
   }

+  withName: library_merge {
+    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() + 6)}G,virtual_free=${(task.memory.toGiga() + 6)}G" }
+  }
+
   withName: malt {
     clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
@@ -93,7 +97,7 @@ process {
   }

   withName: mtnucratio {
-    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 2)}G,virtual_free=${(task.memory.toGiga() * 2)}G" }
+    clusterOptions = { "-S /bin/bash -l h_vmem=${(task.memory.toGiga() * 4)}G,virtual_free=${(task.memory.toGiga() * 4)}G" }
   }

   withName: vcf2genome {
@@ -212,4 +216,4 @@ profiles {
       bwaalnl = 16500
     }
   }
-}
+}
@@ -10,7 +10,7 @@ params {
 // Specific nf-core/eager process configuration
 process {

-  maxRetries = 2
+  maxRetries = 3

   withName: malt {
     maxRetries = 1
@@ -106,43 +106,43 @@ profiles {
       withLabel:'sc_tiny'{
         cpus = { check_max( 1, 'cpus' ) }
         memory = { check_max( 2.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'sc_small'{
         cpus = { check_max( 1, 'cpus' ) }
         memory = { check_max( 8.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'sc_medium'{
         cpus = { check_max( 1, 'cpus' ) }
         memory = { check_max( 16.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'mc_small'{
         cpus = { check_max( 2, 'cpus' ) }
         memory = { check_max( 8.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'mc_medium' {
         cpus = { check_max( 4, 'cpus' ) }
         memory = { check_max( 16.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'mc_large'{
         cpus = { check_max( 8, 'cpus' ) }
         memory = { check_max( 32.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }

       withLabel:'mc_huge'{
         cpus = { check_max( 32, 'cpus' ) }
         memory = { check_max( 512.GB * task.attempt, 'memory' ) }
-        time = { task.attempt == 3 ? 1440.h : task.attempt == 2 ? 48.h : 2.h }
+        time = { task.attempt == 3 ? 1440.h : 48.h }
       }
     }
   }
@@ -164,7 +164,7 @@ profiles {
     params {
       config_profile_description = 'Rough HOPS screening MPI-SHH profile, provided by nf-core/configs.'
       fasta = '/projects1/Reference_Genomes/Human/HG19/hg19_complete.fasta'
-      bwa_index = '/projects1/Reference_Genomes/Human/HG19/hg19_complete.fasta'
+      bwa_index = '/projects1/Reference_Genomes/Human/HG19/'
      fasta_index = '/projects1/Reference_Genomes/Human/HG19/hg19_complete.fasta.fai'
       seq_dict = '/projects1/Reference_Genomes/Human/HG19/hg19_complete.dict'
       bwaalnn = 0.01
conf/pipeline/rnaseq/utd_sysbio.config — new file, 19 lines
@@ -0,0 +1,19 @@
+params {
+  config_profile_description = 'University of Texas at Dallas HPC cluster profile provided by nf-core/configs'
+  config_profile_contact = 'Edmund Miller(@emiller88)'
+  config_profile_url = 'http://docs.oithpc.utdallas.edu/'
+}
+
+process {
+
+  withName : "STAR_ALIGN" {
+    memory = 36.GB
+  }
+
+  withLabel:process_high {
+    cpus = { check_max( 16 * task.attempt, 'cpus' ) }
+    memory = { check_max( 60.GB * task.attempt, 'memory' ) }
+    time = { check_max( 16.h * task.attempt, 'time' ) }
+  }
+
+}
conf/pipeline/sarek/cfc.config — new file, 25 lines
@@ -0,0 +1,25 @@
+// Profile config names for nf-core/configs
+
+params {
+  // Specific nf-core/configs params
+  config_profile_contact = 'Friederike Hanssen (@FriederikeHanssen)'
+  config_profile_description = 'nf-core/sarek CFC profile provided by nf-core/configs'
+}
+
+// Specific nf-core/sarek process configuration
+process {
+  withName:'StrelkaSingle|Strelka|StrelkaBP|MantaSingle|Manta' {
+    memory = 59.GB
+    cpus = 20
+  }
+  withName:'MSIsensor_scan|MSIsensor_msi' {
+    memory = 55.GB
+  }
+  withName:BamQC {
+    memory = 372.GB
+  }
+  withName:MapReads{
+    cpus = 20
+    memory = 59.GB
+  }
+}
@@ -2,8 +2,9 @@

 params {
   // Specific nf-core/configs params
-  config_profile_contact = 'Maxime Garcia (@MaxUlysse)'
+  config_profile_contact = 'Maxime Garcia (@maxulysse)'
   config_profile_description = 'nf-core/sarek MUNIN profile provided by nf-core/configs'
+  config_profile_url = 'https://ki.se/forskning/barntumorbanken'

   // Specific nf-core/sarek params
   annotation_cache = true
@@ -20,21 +20,33 @@ params {
   primer_sets {
     artic {
       '1' {
-        fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V1/nCoV-2019.reference.fasta'
+        fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V1/nCoV-2019.reference.fasta'
         gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-        primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V1/nCoV-2019.primer.bed'
+        primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V1/nCoV-2019.primer.bed'
         scheme = 'nCoV-2019'
       }
       '2' {
-        fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V2/nCoV-2019.reference.fasta'
+        fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V2/nCoV-2019.reference.fasta'
         gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-        primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V2/nCoV-2019.primer.bed'
+        primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V2/nCoV-2019.primer.bed'
         scheme = 'nCoV-2019'
       }
       '3' {
-        fasta = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta'
+        fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V3/nCoV-2019.reference.fasta'
         gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
-        primer_bed = 'https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed'
+        primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V3/nCoV-2019.primer.bed'
         scheme = 'nCoV-2019'
       }
+      '4' {
+        fasta = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V4/SARS-CoV-2.reference.fasta'
+        gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
+        primer_bed = 'https://github.com/artic-network/artic-ncov2019/raw/master/primer_schemes/nCoV-2019/V4/SARS-CoV-2.scheme.bed'
+        scheme = 'SARS-CoV-2'
+      }
+      '1200' {
+        fasta = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/primer_schemes/artic/nCoV-2019/V1200/nCoV-2019.reference.fasta'
+        gff = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/GCA_009858895.3_ASM985889v3_genomic.200409.gff.gz'
+        primer_bed = 'https://github.com/nf-core/test-datasets/raw/viralrecon/genome/MN908947.3/primer_schemes/artic/nCoV-2019/V1200/nCoV-2019.bed'
+        scheme = 'nCoV-2019'
+      }
     }
conf/sanger.config — new file, 32 lines
@@ -0,0 +1,32 @@
+params {
+  config_profile_description = 'The Wellcome Sanger Institute HPC cluster profile'
+  config_profile_contact = 'Anthony Underwood (@aunderwo)'
+  config_profile_url = 'https://www.sanger.ac.uk/group/informatics-support-group/'
+}
+
+singularity {
+  enabled = true
+  cacheDir = "${baseDir}/singularity"
+  runOptions = '--bind /lustre --bind /nfs/pathnfs01 --bind /nfs/pathnfs02 --bind /nfs/pathnfs03 --bind /nfs/pathnfs04 --bind /nfs/pathnfs05 --bind /nfs/pathnfs06 --no-home'
+}
+
+process{
+  executor = 'lsf'
+  queue = 'normal'
+  errorStrategy = { task.attempt <= 5 ? "retry" : "finish" }
+  process.maxRetries = 5
+}
+
+executor{
+  name = 'lsf'
+  perJobMemLimit = true
+  poolSize = 4
+  submitRateLimit = '5 sec'
+  killBatchSize = 50
+}
+
+params {
+  max_memory = 128.GB
+  max_cpus = 64
+  max_time = 12.h
+}
conf/utd_sysbio.config — new file, 35 lines
@@ -0,0 +1,35 @@
+//Profile config names for nf-core/configs
+params {
+  config_profile_description = 'University of Texas at Dallas HPC cluster profile provided by nf-core/configs'
+  config_profile_contact = 'Edmund Miller(@emiller88)'
+  config_profile_url = 'http://docs.oithpc.utdallas.edu/'
+  singularity_cache_dir = '/scratch/applied-genomics/singularity'
+}
+
+env {
+  TMPDIR = '/home/$USER/scratch/tmp'
+}
+
+singularity {
+  enabled = true
+  envWhitelist='SINGULARITY_BINDPATH'
+  autoMounts = true
+  cacheDir = params.singularity_cache_dir
+}
+
+process {
+  beforeScript = 'module load singularity/3.4.1'
+  executor = 'slurm'
+  queue = { task.memory >= 30.GB && task.cpu <= 16 ? 'normal': 'smallmem' }
+}
+
+// Preform work directory cleanup after a successful run
+cleanup = true
+
+params {
+  // TODO Need to initialize this
+  // igenomes_base = '/scratch/applied-genomics/references/iGenomes/references/'
+  max_memory = 90.GB
+  max_cpus = 16
+  max_time = 96.h
+}
docs/aws_tower.md — new file, 5 lines
@@ -0,0 +1,5 @@
+# nf-core/configs: AWS Batch with Tower Configuration
+
+To be used when submitting jobs to AWS Batch by using Tower Forge. If you are not using Tower Forge, consider using the profile `awsbatch`, where you can directly specify the Batch queue, AWS region and AWS CLI path.
+
+This profile defines `awsbatch` as the executor and enables `overwrite` for the `trace`, `timeline`, `report` and `dag` outputs so that pipelines can be resumed.
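As a sketch of how this profile might be launched from the command line with Tower monitoring (the pipeline name and output bucket below are placeholders, not part of this commit; with Tower Forge the profile is normally selected in the launch form instead):

```bash
# Hypothetical launch using the aws_tower profile; pipeline and bucket are placeholders.
export TOWER_ACCESS_TOKEN=<your token>
nextflow run nf-core/rnaseq -profile aws_tower -with-tower --outdir s3://my-bucket/results
```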
docs/cambridge.md — new file, 18 lines
@@ -0,0 +1,18 @@
+# nf-core/configs: Cambridge HPC Configuration
+
+All nf-core pipelines have been successfully configured for use on the Cambridge HPC cluster at [The University of Cambridge](https://www.cam.ac.uk/).
+To use, run the pipeline with `-profile cambridge`. This will download and launch the [`cambridge.config`](../conf/cambridge.config), which has been pre-configured
+with a setup suitable for the Cambridge HPC cluster. Using this profile, either a Docker image containing all of the required software will be downloaded
+and converted to a Singularity image, or a Singularity image will be downloaded directly before execution of the pipeline.
+
+The latest version of Nextflow is not installed by default on the Cambridge HPC cluster. You will need to install it into a directory you have write access to.
+Follow these instructions from the Nextflow documentation.
+
+- Install Nextflow: [here](https://www.nextflow.io/docs/latest/getstarted.html#)
+
+All of the intermediate files required to run the pipeline will be stored in the `work/` directory. It is recommended to delete this directory after the pipeline
+has finished successfully, because it can get quite large and all of the main output files will be saved in the `results/` directory anyway.
+
+> NB: You will need an account to use the Cambridge HPC cluster in order to run the pipeline. If in doubt contact IT.
+> NB: Nextflow will need to submit the jobs via SLURM to the Cambridge HPC cluster and as such the commands above will have to be executed on one of the login
+nodes. If in doubt contact IT.
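A minimal launch from a Cambridge login node could look like the following sketch (the pipeline name, input sheet and output directory are placeholders):

```bash
# Hypothetical example — assumes Nextflow has already been installed into a
# directory on your PATH, as described above.
nextflow run nf-core/rnaseq -profile cambridge --input samplesheet.csv --outdir results -bg
```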
@@ -4,18 +4,16 @@ All nf-core pipelines have been successfully configured for use on the HPCs at [the MPCDF]

 > :warning: these profiles are not officially supported by the MPCDF.

-To run Nextflow, the `jdk` module must be loaded. To use the nf-core profile(s), run the pipeline with `-profile <cluster>,mpcdf`.
+To run Nextflow, the `jdk` module must be loaded. To use the nf-core profile(s), run the pipeline with `-profile mpcdf,<cluster>`.

-Currently the following clusters are supported: cobra, raven
+Currently profiles for the following clusters are supported: `cobra`, `raven`

+All profiles use `singularity` as the corresponding containerEngine. To prevent repeatedly downloading the same singularity image for every pipeline run, for all profiles we recommend specifying a cache location in your `~/.bash_profile` with the `$NXF_SINGULARITY_CACHEDIR` bash variable.
+
 >NB: Nextflow will need to submit the jobs via SLURM to the clusters and as such the commands above will have to be executed on one of the head nodes. Check the [MPCDF documentation](https://www.mpcdf.mpg.de/services/computing).

 ## cobra

-Cobra does not currently support singularity, therefore the anaconda/module is loaded for each process.
-
-Due to this, we also recommend setting the `$NXF_CONDA_CACHEDIR` to a location of your choice to store all environments (so to prevent nextflow building the environment on every run).
-
 To use: `-profile cobra,mpcdf`

 Sets the following parameters:
@@ -31,10 +29,6 @@ Sets the following parameters:

 ## raven

-Raven does not currently support singularity, therefore `module load anaconda/3/2020.02` is loaded for each process.
-
-Due to this, we also recommend setting the `$NXF_CONDA_CACHEDIR` to a location of your choice to store all environments (so to prevent nextflow building the environment on every run).
-
 To use: `-profile raven,mpcdf`

 Sets the following parameters:
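For example, the recommended cache variable can be set once in `~/.bash_profile` (the path below is a placeholder — any directory you can write to works):

```bash
# Hypothetical cache location; Nextflow reuses images stored here across runs.
export NXF_SINGULARITY_CACHEDIR=/u/$USER/nf_singularity_cache
```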
docs/pipeline/rnaseq/utd_sysbio.md — new file, 23 lines
@@ -0,0 +1,23 @@
+# nf-core/configs: UTD sysbio rnaseq specific configuration
+
+Extra specific configuration for the rnaseq pipeline.
+
+## Usage
+
+To use, run the pipeline with `-profile sysbio`.
+
+This will download and launch the rnaseq-specific [`utd_sysbio.config`](../../../conf/pipeline/rnaseq/utd_sysbio.config), which has been pre-configured with a setup suitable for the UTD Sysbio cluster.
+
+Example: `nextflow run nf-core/rnaseq -profile sysbio`
+
+## rnaseq specific configurations for UTD sysbio
+
+Specific configurations for UTD Sysbio have been made for rnaseq.
+
+### General profiles
+
+<!-- TODO -->
+
+### Contextual profiles
+
+<!-- TODO -->
docs/sanger.md — new file, 54 lines
@ -0,0 +1,54 @@
|
|||
# nf-core/configs: Wellcome Sanger Institute Configuration
|
||||
|
||||
To use, run the pipeline with `-profile sanger`. This will download and launch the [`sanger.config`](../conf/sanger.config) which has been
|
||||
pre-configured with a setup suitable for the Wellcome Sanger Institute LSF cluster.
|
||||
Using this profile, either a docker image containing all of the required software will be downloaded, and converted to a Singularity image or
|
||||
a Singularity image downloaded directly before execution of the pipeline.
|
||||
|
||||
## Running the workflow on the Wellcome Sanger Institute cluster
|
||||
|
||||
The latest version of Nextflow is not installed by default on the cluster. You will need to install it into a directory you have write access to
|
||||
|
||||
- Install Nextflow : [here](https://www.nextflow.io/docs/latest/getstarted.html#)
|
||||
|
||||
A recommended place to move the `nextflow` executable to is `~/bin` so that it's in the `PATH`.
|
||||
|
||||
Nextflow manages each process as a separate job that is submitted to the cluster by using the `bsub` command.
|
||||
Since the Nextflow pipeline will submit individual jobs for each process to the cluster and dependencies will be provided bu Singularity images you shoudl make sure that your account has access to the Singularity binary by adding these lines to your `.bashrc` file
|
||||
|
||||
```bash
|
||||
[[ -f /software/pathogen/farm5 ]] && module load ISG/singularity
|
||||
```
|

Nextflow shouldn't run directly on the submission node but on a compute node.
To do so, make a shell script with a structure similar to the following code and submit it with `bsub < $PWD/my_script.sh`:

```bash
#!/bin/bash
#BSUB -o /path/to/a/log/dir/%J.o
#BSUB -e /path/to/a/log/dir/%J.e
#BSUB -M 8000
#BSUB -q long
#BSUB -n 4

export HTTP_PROXY='http://wwwcache.sanger.ac.uk:3128'
export HTTPS_PROXY='http://wwwcache.sanger.ac.uk:3128'
export NXF_ANSI_LOG=false
export NXF_OPTS="-Xms8G -Xmx8G -Dnxf.pool.maxThreads=2000"
export NXF_VER=21.04.0-edge

nextflow run \
    /path/to/nf-core/pipeline/main.nf \
    -w /path/to/some/dir/work \
    -profile sanger \
    -c my_specific.config \
    -qs 1000 \
    -resume

## Clean up on exit 0 - delete this if you want to keep the work dir
status=$?
if [[ $status -eq 0 ]]; then
    rm -r /path/to/some/dir/work
fi
```

234 docs/uppmax.md

@@ -8,42 +8,47 @@ We have a Slack channel dedicated to UPPMAX users on the nf-core Slack: [https:/

## Using the UPPMAX config profile

Before running the pipeline you will need to either install `Nextflow` or load it using the environment module system (this can be done with e.g. `module load bioinfo-tools Nextflow/<VERSION>` where `VERSION` is e.g. `20.10`).

To use, run the pipeline with `-profile uppmax` (one hyphen).
This will download and launch the [`uppmax.config`](../conf/uppmax.config) which has been pre-configured with a setup suitable for the UPPMAX servers.
It will enable `Nextflow` to manage the pipeline jobs via the `Slurm` job scheduler.
Using this profile, `Docker` image(s) containing the required software will be downloaded and converted to `Singularity` image(s) if needed before execution of the pipeline.

Recent versions of `Nextflow` also support the environment variable `NXF_SINGULARITY_CACHEDIR`, which can be used to supply images.
Images for some `nf-core` pipelines are available under `/sw/data/ToolBox/nf-core/` and can be used by setting `NXF_SINGULARITY_CACHEDIR=/sw/data/ToolBox/nf-core/; export NXF_SINGULARITY_CACHEDIR`.
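
For example, a short sketch of both options (the personal cache path in the comment is a hypothetical placeholder):

```bash
# Use the centrally provided images
NXF_SINGULARITY_CACHEDIR=/sw/data/ToolBox/nf-core/; export NXF_SINGULARITY_CACHEDIR

# Or point at your own image cache instead (hypothetical path)
# export NXF_SINGULARITY_CACHEDIR=/proj/<PROJECT>/nobackup/singularity-images
```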

In addition to this config profile, you will also need to specify an UPPMAX project id.
You can do this with the `--project` flag (two hyphens) when launching `Nextflow`.
For example:

```bash
# Launch a nf-core pipeline with the uppmax profile for the project id snic2018-1-234
$ nextflow run nf-core/<PIPELINE> -profile uppmax --project snic2018-1-234 [...]
```

> NB: If you're not sure what your UPPMAX project ID is, try running `groups` or checking SUPR.

Just run `Nextflow` on a login node and it will handle everything else.

Remember to use `-bg` to launch `Nextflow` in the background, so that the pipeline doesn't exit if you leave your terminal session.
Alternatively, you can also launch `Nextflow` in a `screen` or a `tmux` session.

## Using AWS iGenomes references

A local copy of the `AWS iGenomes` resource has been made available on all UPPMAX clusters, so you should be able to run the pipeline against any reference available in the `conf/igenomes.config`.
You can do this by simply using the `--genome <GENOME_ID>` parameter.

## Getting more memory

If your `nf-core` pipeline run is running out of memory, you can run on a fat node with more memory using the following `Nextflow` flags:

```bash
--clusterOptions "-C mem256GB -p node" --max_memory "256GB"
```

This raises the ceiling of available memory from the default of `128.GB` to `256.GB`.
`rackham` has nodes with 128GB, 256GB and 1TB memory available.

Note that each job will still start with the same request as normal, but restarted attempts with larger requests will be able to request greater amounts of memory.
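
Putting it together, a full fat-node launch might look like the following sketch (the pipeline name and project id are placeholders):

```bash
nextflow run nf-core/<PIPELINE> \
    -profile uppmax \
    --project snic2018-1-234 \
    --clusterOptions "-C mem256GB -p node" \
    --max_memory "256GB"
```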

@@ -53,15 +58,15 @@ All jobs will be submitted to fat nodes using this method, so it's only for use

The UPPMAX nf-core configuration profile uses the `hostname` of the active environment to automatically apply the following resource limits:

* `bianca`
  * cpus available: 16 cpus
  * memory available: 109 GB
* `irma`
  * cpus available: 16 cpus
  * memory available: 250 GB
* `rackham`
  * cpus available: 20 cpus
  * memory available: 125 GB

## Development config

@@ -74,14 +79,197 @@ It is not suitable for use with real data.

To use it, submit with `-profile uppmax,devel`.

## Running on bianca

> :warning: For more information, please follow the following guides:
>
> * [UPPMAX `bianca` user guide](http://uppmax.uu.se/support/user-guides/bianca-user-guide/)
> * [nf-core guide for running offline](https://nf-co.re/usage/offline)
> * [nf-core `tools` guide for downloading pipelines for offline use](https://nf-co.re/tools#downloading-pipelines-for-offline-use)
> * [UPPMAX `Singularity` guide](https://www.uppmax.uu.se/support-sv/user-guides/singularity-user-guide/)

For security reasons, there is no internet access on `bianca`, so you can't download from or upload files to the cluster directly.
Before running a nf-core pipeline on `bianca` you will first have to download the pipeline and the singularity images needed elsewhere and transfer them via the `wharf` area to your own `bianca` project.

In this guide, we use `rackham` to download and transfer files to the `wharf` area, but it can also be done on your own computer.
If you use `rackham` to download the pipeline and the singularity containers, we recommend using an interactive session (cf. the [interactive guide](https://www.uppmax.uu.se/support/faq/running-jobs-faq/how-can-i-run-interactively-on-a-compute-node/)), which is what we do in the following guide.

### Download and install Nextflow

You can use the UPPMAX-provided `Nextflow` module, but if necessary, you can also download a more recent version.

```bash
# Connect to bianca
$ ssh -A <USER>-<BIANCA_PROJECT>@bianca.uppmax.uu.se

# See the available versions for the module
module spider Nextflow

# Load a specific version of the Nextflow module
module load bioinfo-tools Nextflow/<VERSION>
```

```bash
# Connect to rackham
$ ssh -X <USER>@rackham.uppmax.uu.se
# Or stay in your terminal

# Download the nextflow-all bundle
$ wget https://github.com/nextflow-io/nextflow/releases/download/v<NEXTFLOW_VERSION>/nextflow-<NEXTFLOW_VERSION>-all

# Connect to the wharf area using sftp
$ sftp <USER>-<BIANCA_PROJECT>@bianca-sftp.uppmax.uu.se:<USER>-<BIANCA_PROJECT>

# Transfer nextflow to the wharf area
sftp> put nextflow-<NEXTFLOW_VERSION>-all .

# Exit sftp
$ exit

# Connect to bianca
$ ssh -A <USER>-<BIANCA_PROJECT>@bianca.uppmax.uu.se

# Go to your project
$ cd /castor/project/proj_nobackup

# Make a folder for Nextflow
$ mkdir -p tools/nextflow

# Move Nextflow from the wharf area to its directory
$ mv /castor/project/proj_nobackup/wharf/<USER>/<USER>-<BIANCA_PROJECT>/nextflow-<NEXTFLOW_VERSION>-all /castor/project/proj_nobackup/tools/nextflow

# Make it executable
$ chmod a+x /castor/project/proj_nobackup/tools/nextflow/nextflow-<NEXTFLOW_VERSION>-all

# If you want other people to use it,
# be sure that your group has rights to the directory as well
$ chown -R .<BIANCA_PROJECT> /castor/project/proj_nobackup/tools/nextflow/nextflow-<NEXTFLOW_VERSION>-all

# Make a link to it
$ ln -s /castor/project/proj_nobackup/tools/nextflow/nextflow-<NEXTFLOW_VERSION>-all /castor/project/proj_nobackup/tools/nextflow/nextflow

# Every time you launch Nextflow, don't forget to export the following ENV variables,
# or add them to your .bashrc file
$ export NXF_HOME=/castor/project/proj_nobackup/tools/nextflow/
$ export PATH=${NXF_HOME}:${PATH}
$ export NXF_TEMP=$SNIC_TMP
$ export NXF_LAUNCHER=$SNIC_TMP
$ export NXF_SINGULARITY_CACHEDIR=/castor/project/proj_nobackup/singularity-images
```
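
If you prefer to persist these variables, one way is to append the same exports to your `.bashrc`, as suggested in the comments above (a sketch):

```bash
cat >> ~/.bashrc <<'EOF'
export NXF_HOME=/castor/project/proj_nobackup/tools/nextflow/
export PATH=${NXF_HOME}:${PATH}
export NXF_TEMP=$SNIC_TMP
export NXF_LAUNCHER=$SNIC_TMP
export NXF_SINGULARITY_CACHEDIR=/castor/project/proj_nobackup/singularity-images
EOF
```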

### Install nf-core tools

You can use the UPPMAX-provided `nf-core` module, but if necessary, you can also download a more recent version.

```bash
# Connect to rackham
$ ssh -X <USER>@rackham.uppmax.uu.se

# See the available versions for the module
module spider nf-core

# Load a specific version of the nf-core module
module load bioinfo-tools nf-core/<VERSION>
```

```bash
# Connect to rackham
$ ssh -X <USER>@rackham.uppmax.uu.se
# Or stay in your terminal

# Install the latest development version of nf-core tools with pip
$ pip3 install --upgrade --force-reinstall git+https://github.com/nf-core/tools.git@dev --user
```
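
A quick sanity check that the tools are installed and on your `PATH`:

```bash
# Should print the installed nf-core/tools version
nf-core --version
```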

### Download and transfer a nf-core pipeline

```bash
# Connect to rackham
$ ssh -X <USER>@rackham.uppmax.uu.se
# Or stay in your terminal

# Open an interactive session (if you are on rackham)
$ interactive <rackham_project>

# Download a pipeline with the singularity images
$ nf-core download <PIPELINE> -r <PIPELINE_VERSION> -s --compress none

# If necessary, extra singularity images can be downloaded separately
# For example, if you downloaded nf-core/sarek, you will need extra images for annotation
# Here we download the nf-core/sarek GRCh38 specific images
$ singularity pull --name nfcore-sareksnpeff-2.7.GRCh38.img docker://nfcore/sareksnpeff:2.7.GRCh38
$ singularity pull --name nfcore-sarekvep-2.7.GRCh38.img docker://nfcore/sarekvep:2.7.GRCh38

# Which can then be moved into the nf-core/sarek download folder
$ mv *.img nf-core-sarek-2.7/singularity-images/.

# Connect to the wharf area using sftp
$ sftp <USER>-<BIANCA_PROJECT>@bianca-sftp.uppmax.uu.se:<USER>-<BIANCA_PROJECT>

# Transfer the <PIPELINE> folder from rackham to the wharf area
sftp> put -r nf-core-<PIPELINE>-<PIPELINE_VERSION> .

# The archives will be in the wharf folder in your user home on your bianca project

# Connect to bianca
$ ssh -A <USER>-<BIANCA_PROJECT>@bianca.uppmax.uu.se

# Go to your project
$ cd /castor/project/proj_nobackup

# Make and go into a nf-core directory (where you will store all nf-core pipelines)
$ mkdir -p nf-core
$ cd nf-core

# Copy the folder from the wharf area to the project
$ cp -r /castor/project/proj_nobackup/wharf/<USER>/<USER>-<BIANCA_PROJECT>/nf-core-<PIPELINE>-<PIPELINE_VERSION> .

# If you want other people to use it,
# be sure that your group has rights to the directory as well
$ chown -R .<BIANCA_PROJECT> nf-core-<PIPELINE>-<PIPELINE_VERSION>

# Make a symbolic link to the extracted repository
$ ln -s nf-core-<PIPELINE>-<PIPELINE_VERSION> nf-core-<PIPELINE>-default
```

The principle is for every member of your project to be able to use the same `nf-core/<PIPELINE>` version at the same time.
So every member of the project who wants to use `nf-core/<PIPELINE>` will need to do:

```bash
# Connect to bianca
$ ssh -A <USER>-<BIANCA_PROJECT>@bianca.uppmax.uu.se

# Go to your user directory
$ cd /home/<USER>

# Make a symbolic link to the default nf-core/<PIPELINE>
$ ln -s /castor/project/proj_nobackup/nf-core/nf-core-<PIPELINE>-default nf-core-<PIPELINE>
```

And then `nf-core/<PIPELINE>` can be used with:

```bash
# Run <PIPELINE> on bianca
$ nextflow run ~/<PIPELINE> -profile uppmax --project <BIANCA_PROJECT> --genome <GENOME_ASSEMBLY> ...
```

## Update a pipeline

To update, repeat the same steps as for installing, and update the link.

```bash
# Connect to bianca (connect to rackham first if needed)
$ ssh -A <USER>-<BIANCA_PROJECT>@bianca.uppmax.uu.se

# Go to the nf-core directory in your project
$ cd /castor/project/proj_nobackup/nf-core

# Remove the old link
$ unlink nf-core-<PIPELINE>-default

# Link to the new nf-core/<PIPELINE> version
$ ln -s nf-core-<PIPELINE>-<PIPELINE_VERSION> nf-core-<PIPELINE>-default
```

You can, for example, keep a `nf-core-<PIPELINE>-default` version that you are sure is working, and make a link for a `nf-core-<PIPELINE>-testing` or `nf-core-<PIPELINE>-development` version.

16 docs/utd_sysbio.md Normal file

@@ -0,0 +1,16 @@

# nf-core/configs: UTD Sysbio Configuration

All nf-core pipelines have been successfully configured for use on the Sysbio HPC cluster at [The University of Texas at Dallas](https://www.utdallas.edu/).

To use, run the pipeline with `-profile utd_sysbio`. This will download and launch the [`utd_sysbio.config`](../conf/utd_sysbio.config) which has been pre-configured with a setup suitable for the Sysbio HPC cluster. Using this profile, a docker image containing all of the required software will be downloaded and converted to a Singularity image before execution of the pipeline.

Before running the pipeline you will need to load Singularity using the environment module system on Sysbio. You can do this by issuing the commands below:

```bash
## Singularity environment modules
module purge
module load singularity
```

> NB: You will need an account to use the HPC cluster on Sysbio in order to run the pipeline. If in doubt contact OIT.
> NB: Nextflow will need to submit the jobs via SLURM to the HPC cluster and as such the commands above will have to be executed on one of the login nodes. If in doubt contact OIT.
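
Putting the pieces together, a typical launch on Sysbio might look like the following sketch (`<PIPELINE>` is a placeholder for any nf-core pipeline):

```bash
# Load Singularity on a login node, then launch; jobs are submitted via SLURM
module purge
module load singularity
nextflow run nf-core/<PIPELINE> -profile utd_sysbio
```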

@@ -12,6 +12,7 @@

profiles {
  abims { includeConfig "${params.custom_config_base}/conf/abims.config" }
  awsbatch { includeConfig "${params.custom_config_base}/conf/awsbatch.config" }
  aws_tower { includeConfig "${params.custom_config_base}/conf/aws_tower.config" }
  bi { includeConfig "${params.custom_config_base}/conf/bi.config" }
  bigpurple { includeConfig "${params.custom_config_base}/conf/bigpurple.config" }
  binac { includeConfig "${params.custom_config_base}/conf/binac.config" }

@@ -42,11 +43,13 @@ profiles {

  pasteur { includeConfig "${params.custom_config_base}/conf/pasteur.config" }
  phoenix { includeConfig "${params.custom_config_base}/conf/phoenix.config" }
  prince { includeConfig "${params.custom_config_base}/conf/prince.config" }
  sanger { includeConfig "${params.custom_config_base}/conf/sanger.config" }
  seg_globe { includeConfig "${params.custom_config_base}/conf/seg_globe.config" }
  shh { includeConfig "${params.custom_config_base}/conf/shh.config" }
  uct_hpc { includeConfig "${params.custom_config_base}/conf/uct_hpc.config" }
  uppmax { includeConfig "${params.custom_config_base}/conf/uppmax.config" }
  utd_ganymede { includeConfig "${params.custom_config_base}/conf/utd_ganymede.config" }
  utd_sysbio { includeConfig "${params.custom_config_base}/conf/utd_sysbio.config" }
  uzh { includeConfig "${params.custom_config_base}/conf/uzh.config" }
  jax { includeConfig "${params.custom_config_base}/conf/jax.config" }
}

@@ -67,6 +70,7 @@ params {

  genotoul: ['.genologin1.toulouse.inra.fr', '.genologin2.toulouse.inra.fr'],
  genouest: ['.genouest.org'],
  uppmax: ['.uppmax.uu.se'],
  utd_ganymede: ['ganymede.utdallas.edu'],
  utd_sysbio: ['sysbio.utdallas.edu']
  ]
}

@@ -10,4 +10,5 @@

profiles {
  eddie { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/eddie.config" }
  utd_sysbio { includeConfig "${params.custom_config_base}/conf/pipeline/rnaseq/utd_sysbio.config" }
}

@@ -12,5 +12,7 @@ profiles {

  munin { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/munin.config" }
  uppmax { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/uppmax.config" }
  icr_davros { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/icr_davros.config" }
  cfc { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/cfc.config" }
  cfc_dev { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/cfc.config" }
  eddie { includeConfig "${params.custom_config_base}/conf/pipeline/sarek/eddie.config" }
}
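
These pipeline-level profiles are selected with the same `-profile` flag as the cluster profiles; for example (a sketch using the `munin` profile listed above):

```bash
# The sarek-specific munin settings load on top of the generic munin profile
nextflow run nf-core/sarek -profile munin
```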