diff --git a/.github/workflows/update_freyja.yml b/.github/workflows/update_freyja.yml
index 08f1445aa..054cdd6d4 100644
--- a/.github/workflows/update_freyja.yml
+++ b/.github/workflows/update_freyja.yml
@@ -9,7 +9,7 @@ name: Update Freyja
on:
workflow_dispatch:
schedule:
- - cron: '30 7 * * *'
+ - cron: '30 7 1 * *'
run-name: Updating Freyja
diff --git a/.gitpod.yml b/.gitpod.yml
index e5a3bb083..d1709bab7 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -10,7 +10,7 @@ tasks:
pip install graphviz
. /opt/conda/etc/profile.d/conda.sh
conda activate base
- git checkout main
+ git checkout master
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
diff --git a/Program_Licenses.md b/Program_Licenses.md
index 15586a21c..8bb262fe0 100644
--- a/Program_Licenses.md
+++ b/Program_Licenses.md
@@ -19,7 +19,7 @@ The licenses of the open-source software that is contained in these Docker image
| Bakta | GNU GPLv3 | https://github.com/oschwengers/bakta/blob/main/LICENSE |
| Bandage | GNU GPLv3 | https://github.com/rrwick/Bandage?tab=GPL-3.0-1-ov-file#readme |
| BBTools | non-standard - see `licence.txt` and `legal.txt` that is included in docker image under `/bbmap/docs/`; Also on sourceforge repo for BBTools | https://jgi.doe.gov/disclaimer/ |
-| bcftools | MIT or **GNU GPLv3** (depends on how bcftools was compiled, I believe GNU GPLv3 applies here) | https://github.com/samtools/bcftools/blob/develop/LICENSE |
+| bcftools | MIT & **GNU GPLv3** | https://github.com/samtools/bcftools/blob/develop/LICENSE |
| bedtools | MIT | https://github.com/arq5x/bedtools2/blob/master/LICENSE |
| blast+ | Public Domain | https://www.ncbi.nlm.nih.gov/IEB/ToolBox/CPP_DOC/lxr/source/scripts/projects/blast/LICENSE |
| bowtie2 | GNU GPLv3 | https://github.com/BenLangmead/bowtie2/blob/master/LICENSE |
@@ -60,6 +60,7 @@ The licenses of the open-source software that is contained in these Docker image
| Freyja | BSD-2 | https://github.com/andersen-lab/Freyja/blob/main/LICENSE |
| GAMBIT | GNU aGPLv3 | https://github.com/jlumpe/gambit/blob/master/LICENSE |
| GAMMA | Apache 2.0 | https://github.com/rastanton/GAMMA/blob/main/LICENSE |
+| GenoFLU | GNU GPLv3 | https://github.com/USDA-VS/GenoFLU/blob/main/LICENSE |
| Genotyphi | GNU GPLv3 | https://github.com/katholt/genotyphi/blob/main/LICENSE |
| geNomad | ACADEMIC, INTERNAL, RESEARCH & DEVELOPMENT, NON-COMMERCIAL USE ONLY | https://github.com/apcamargo/genomad/blob/main/LICENSE |
| GenoVi | BY-NC-SA Creative Commons License | https://github.com/robotoD/GenoVi/blob/main/LICENSE.txt |
@@ -113,7 +114,9 @@ The licenses of the open-source software that is contained in these Docker image
| ONTime | MIT | https://github.com/mbhall88/ontime/blob/main/LICENSE |
| OrthoFinder | GNU GPLv3 | https://github.com/davidemms/OrthoFinder/blob/master/License.md |
| Panaroo | MIT | https://github.com/gtonkinhill/panaroo/blob/master/LICENSE |
+| pango_aliasor | MIT | https://github.com/corneliusroemer/pango_aliasor/blob/main/LICENSE |
| Pangolin | GNU GPLv3 | https://github.com/cov-lineages/pangolin/blob/master/LICENSE.txt |
+| panqc | MIT | https://github.com/maxgmarin/panqc/blob/main/LICENSE |
| Parsnp | Battelle National Biodefense Institute (BNBI) | https://github.com/marbl/parsnp?tab=License-1-ov-file#readme |
| pasty | Apache 2.0 | https://github.com/rpetit3/pasty/blob/main/LICENSE |
| Pavian | GNU GPLv3 | https://github.com/fbreitwieser/pavian/blob/master/DESCRIPTION |
@@ -143,6 +146,7 @@ The licenses of the open-source software that is contained in these Docker image
| raven | MIT | https://github.com/lbcb-sci/raven/blob/master/LICENSE |
| RAxML | GNU GPLv3 | https://github.com/stamatak/standard-RAxML/blob/master/gpl-3.0.txt |
| RAxML-NG | GNU AGPLv3| https://github.com/amkozlov/raxml-ng/blob/master/LICENSE.txt |
+| RAxML-NG | GNU GPLv2 | https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html |
| ResFinder | Apache 2.0 | https://bitbucket.org/genomicepidemiology/resfinder/src/master/ |
| Roary | GNU GPLv3 | https://github.com/sanger-pathogens/Roary/blob/master/GPL-LICENSE |
| SalmID| MIT | https://github.com/hcdenbakker/SalmID/blob/master/LICENSE |
@@ -155,6 +159,7 @@ The licenses of the open-source software that is contained in these Docker image
| seqtk | MIT | https://github.com/lh3/seqtk/blob/master/LICENSE |
| Seroba | GNU GPLv3 | https://github.com/sanger-pathogens/seroba/blob/master/LICENSE |
| SerotypeFinder | Apache 2.0 | https://bitbucket.org/genomicepidemiology/serotypefinder/ |
+| ShigaPass | GNU GPLv3 | https://github.com/imanyass/ShigaPass/blob/main/LICENSE |
| shigatyper | non-standard license see --> | https://github.com/CFSAN-Biostatistics/shigatyper/blob/master/LICENSE |
| ShigEiFinder | GNU GPLv3 | https://github.com/LanLab/ShigEiFinder/blob/main/LICENSE |
| Shovill | GNU GPLv3 | https://github.com/tseemann/shovill/blob/master/LICENSE |
@@ -173,6 +178,7 @@ The licenses of the open-source software that is contained in these Docker image
| SRA-toolkit | Public Domain | https://github.com/ncbi/sra-tools/blob/master/LICENSE |
| SRST2 | BSD | https://github.com/katholt/srst2/blob/master/LICENSE.txt |
| Staramr | Apache 2.0 | https://github.com/phac-nml/staramr/blob/master/LICENSE |
+| stxtyper | Public Domain | https://github.com/ncbi/stxtyper/blob/main/LICENSE |
| sylph | MIT | https://github.com/bluenote-1577/sylph/blob/main/LICENSE |
| TBProfiler | GNU GPLv3 | https://github.com/jodyphelan/TBProfiler/blob/master/LICENSE |
| TipToft | GNU GPLv3 | https://github.com/andrewjpage/tiptoft/blob/master/LICENSE |
@@ -185,6 +191,7 @@ The licenses of the open-source software that is contained in these Docker image
| Verkko | Public Domain | https://github.com/marbl/verkko/blob/master/README.licenses |
| VIBRANT | GNU GPLv3 | https://github.com/AnantharamanLab/VIBRANT/blob/master/LICENSE |
| VIGOR4 | GNU GPLv3 | https://github.com/JCVenterInstitute/VIGOR4/blob/master/LICENSE.txt |
+| Viridian | MIT | https://github.com/iqbal-lab-org/viridian/blob/master/LICENSE |
| VirSorter2 | GNU GPLv2 | https://github.com/jiarong/VirSorter2/blob/master/LICENSE |
| VirulenceFinder | Apache 2.0 | https://bitbucket.org/genomicepidemiology/virulencefinder/src/master/ |
| wtdbg2 | GNU GPLv3 | https://github.com/ruanjue/wtdbg2/blob/master/LICENSE.txt |
diff --git a/README.md b/README.md
index 963e43320..dfb39158e 100644
--- a/README.md
+++ b/README.md
@@ -90,6 +90,11 @@ singularity exec --bind $(pwd)/fastq:/data staphb-bbtools-38.96.simg bbduk.sh in
Further documentation can be found at [docs.sylabs.io](https://docs.sylabs.io/guides/3.1/user-guide/cli.html)
+## Training
+
+- Training materials (slides & exercises) from the North East Bioinformatics Regional Resource 2023 workshop **Docker for Public Health Bioinformatics** can be found in [`training/NE-BRR-docker-for-PH-bioinformatics-May2023/`](training/NE-BRR-docker-for-PH-bioinformatics-May2023/)
+- Video training from the APHL 2024 workshop **Intermediate Docker Bioinformatics Workshop** can be found in [APHL's e-learning resources](https://learn.aphl.org/learn/course/external/view/elearning/355/intermediate-docker-bioinformatics-workshop)
+
## Logs
In November 2020, Docker began to implement pull rate limits for images hosted on dockerhub. This limits the number of `docker pull`'s per time period (e.g. anonymous users allowed 100 pulls per six hours). We applied and were approved for Docker's "Open Source Program," which should have removed the pull rate limits for all `staphb` docker images! 🎉 🥳 If you encounter an error such as `ERROR: toomanyrequests: Too Many Requests.` or `You have reached your pull rate limit. You may increase the limit by authenticating and upgrading: https://www.docker.com/increase-rate-limits.` , please let us know by [submitting an issue.](https://github.com/StaPH-B/docker-builds/issues)
@@ -106,7 +111,7 @@ To learn more about the docker pull rate limits and the open source software pro
| :--------: | ------- | -------- |
| [ABRicate](https://hub.docker.com/r/staphb/abricate/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/abricate)](https://hub.docker.com/r/staphb/abricate) |
- 0.8.7
- 0.8.13
- 0.8.13s (+serotypefinder db)
- 0.9.8
- 1.0.0
- [1.0.1 (+ A. baumannii plasmid typing db)](abricate/1.0.1-Abaum-plasmid)
- [1.0.1 (+ InsaFlu db)](abricate/1.0.1-insaflu-220727)
- [1.0.1 (+ *Vibrio cholerae* db)](abricate/1.0.1-vibrio-cholera/)
| https://github.com/tseemann/abricate |
| [ACI](https://hub.docker.com/r/staphb/aci/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/aci)](https://hub.docker.com/r/staphb/aci) | - [1.4.20240116](./aci/1.4.20240116/)
| https://github.com/erinyoung/ACI |
-| [ANIclustermap](https://hub.docker.com/r/staphb/aniclustermap/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/aniclustermap)](https://hub.docker.com/r/staphb/aniclustermap) | - [1.3.0](aniclusteramp/1.3.0/)
| https://github.com/moshi4/ANIclustermap |
+| [ANIclustermap](https://hub.docker.com/r/staphb/aniclustermap/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/aniclustermap)](https://hub.docker.com/r/staphb/aniclustermap) | - [1.3.0](aniclusteramp/1.3.0/)
- [1.4.0](aniclusteramp/1.4.0/)
| https://github.com/moshi4/ANIclustermap |
| [any2fasta](https://hub.docker.com/r/staphb/any2fasta/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/any2fasta)](https://hub.docker.com/r/staphb/any2fasta) | | https://github.com/tseemann/any2fasta |
| [ARIBA](https://hub.docker.com/r/staphb/ariba/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ariba)](https://hub.docker.com/r/staphb/ariba) | - [2.14.4](./ariba/2.14.4/)
- [2.14.6](./ariba/2.14.6/)
- [2.14.7](./ariba/2.14.7/)
| https://github.com/sanger-pathogens/ariba |
| [artic](https://hub.docker.com/r/staphb/artic)
[![docker pulls](https://badgen.net/docker/pulls/staphb/artic)](https://hub.docker.com/r/staphb/artic) | - [1.2.4-1.11.1 (artic-medaka)](artic/1.2.4-1.11.1/)
- [1.2.4-1.12.0 (artic-medaka)](artic/1.2.4-1.12.0/)
| https://github.com/artic-network/fieldbioinformatics |
@@ -115,16 +120,16 @@ To learn more about the docker pull rate limits and the open source software pro
| [artic-ncov2019-medaka](https://hub.docker.com/r/staphb/artic-ncov2019-medaka)
[![docker pulls](https://badgen.net/docker/pulls/staphb/artic-ncov2019-medaka)](https://hub.docker.com/r/staphb/artic-ncov2019-medaka) | | https://github.com/artic-network/artic-ncov2019 |
| [artic-ncov2019-nanopolish](https://hub.docker.com/r/staphb/artic-ncov2019-nanopolish)
[![docker pulls](https://badgen.net/docker/pulls/staphb/artic-ncov2019-nanopolish)](https://hub.docker.com/r/staphb/artic-ncov2019-nanopolish) | | https://github.com/artic-network/artic-ncov2019 |
| [assembly_snptyper](https://hub.docker.com/r/staphb/assembly_snptyperh)
[![docker pulls](https://badgen.net/docker/pulls/staphb/assembly_snptyper)](https://hub.docker.com/r/staphb/assembly_snptyper) | - [0.1.1](./assembly_snptyper/0.1.1/)
| https://github.com/boasvdp/assembly_snptyper |
-| [Augur](https://hub.docker.com/r/staphb/augur)
[![docker pulls](https://badgen.net/docker/pulls/staphb/augur)](https://hub.docker.com/r/staphb/augur) | - [6.3.0](./augur/6.3.0/)
- [7.0.2](./augur/7.0.2/)
- [8.0.0](./augur/8.0.0/)
- [9.0.0](./augur/9.0.0/)
- [16.0.3](./augur/16.0.3/)
- [24.2.2](./augur/24.2.2/)
- [24.2.3](./augur/24.2.3/)
- [24.3.0](./augur/24.3.0/)
| https://github.com/nextstrain/augur |
+| [Augur](https://hub.docker.com/r/staphb/augur)
[![docker pulls](https://badgen.net/docker/pulls/staphb/augur)](https://hub.docker.com/r/staphb/augur) | - [6.3.0](./augur/6.3.0/)
- [7.0.2](./augur/7.0.2/)
- [8.0.0](./augur/8.0.0/)
- [9.0.0](./augur/9.0.0/)
- [16.0.3](./augur/16.0.3/)
- [24.2.2](./augur/24.2.2/)
- [24.2.3](./augur/24.2.3/)
- [24.3.0](./augur/24.3.0/)
- [24.4.0](./augur/24.4.0/)
| https://github.com/nextstrain/augur |
| [Auspice](https://hub.docker.com/r/staphb/auspice)
[![docker pulls](https://badgen.net/docker/pulls/staphb/auspice)](https://hub.docker.com/r/staphb/auspice) | | https://github.com/nextstrain/auspice |
-| [bakta](https://hub.docker.com/r/staphb/bakta)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bakta)](https://hub.docker.com/r/staphb/bakta) | - [1.9.2](./bakta/1.9.2/)
- [1.9.2-light](./bakta/1.9.2-5.1-light/)
- [1.9.3](./bakta/1.9.3/)
- [1.9.3-light](./bakta/1.9.3-5.1-light/)
| https://github.com/oschwengers/bakta |
+| [bakta](https://hub.docker.com/r/staphb/bakta)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bakta)](https://hub.docker.com/r/staphb/bakta) | - [1.9.2](./bakta/1.9.2/)
- [1.9.2-light](./bakta/1.9.2-5.1-light/)
- [1.9.3](./bakta/1.9.3/)
- [1.9.3-light](./bakta/1.9.3-5.1-light/)
- [1.9.4](./bakta/1.9.4/)
| https://github.com/oschwengers/bakta |
| [bandage](https://hub.docker.com/r/staphb/bandage)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bandage)](https://hub.docker.com/r/staphb/bandage) | - [0.8.1](./bandage/0.8.1/)
| https://rrwick.github.io/Bandage/ |
| [BBTools](https://hub.docker.com/r/staphb/bbtools/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bbtools)](https://hub.docker.com/r/staphb/bbtools) | - [38.76](./bbtools/38.76/)
- [38.86](./bbtools/38.86/)
- [38.95](./bbtools/38.95/)
- [38.96](./bbtools/38.96/)
- [38.97](./bbtools/38.97/)
- [38.98](./bbtools/38.98/)
- [38.99](./bbtools/38.99/)
- [39.00](./bbtools/39.00/)
- [39.01](./bbtools/39.01/)
- [39.06](./bbtools/39.06/)
| https://jgi.doe.gov/data-and-tools/bbtools/ |
-| [bcftools](https://hub.docker.com/r/staphb/bcftools/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bcftools)](https://hub.docker.com/r/staphb/bcftools) | - [1.10.2](./bcftools/1.10.2/)
- [1.11](./bcftools/1.11/)
- [1.12](./bcftools/1.12/)
- [1.13](./bcftools/1.13/)
- [1.14](./bcftools/1.14/)
- [1.15](./bcftools/1.15/)
- [1.16](./bcftools/1.16/)
- [1.17](./bcftools/1.17/)
- [1.18](bcftools/1.18/)
- [1.19](./bcftools/1.19/)
- [1.20](./bcftools/1.20/)
| https://github.com/samtools/bcftools |
+| [bcftools](https://hub.docker.com/r/staphb/bcftools/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bcftools)](https://hub.docker.com/r/staphb/bcftools) | - [1.10.2](./bcftools/1.10.2/)
- [1.11](./bcftools/1.11/)
- [1.12](./bcftools/1.12/)
- [1.13](./bcftools/1.13/)
- [1.14](./bcftools/1.14/)
- [1.15](./bcftools/1.15/)
- [1.16](./bcftools/1.16/)
- [1.17](./bcftools/1.17/)
- [1.18](bcftools/1.18/)
- [1.19](./bcftools/1.19/)
- [1.20](./bcftools/1.20/)
- [1.20.c](./bcftools/1.20.c/)
| https://github.com/samtools/bcftools |
| [bedtools](https://hub.docker.com/r/staphb/bedtools/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bedtools)](https://hub.docker.com/r/staphb/bedtools) | - 2.29.2
- 2.30.0
- [2.31.0](bedtools/2.31.0/)
- [2.31.1](bedtools/2.31.1/)
| https://bedtools.readthedocs.io/en/latest/
https://github.com/arq5x/bedtools2 |
| [berrywood-report-env](https://hub.docker.com/r/staphb/berrywood-report-env/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/berrywood-report-env)](https://hub.docker.com/r/staphb/berrywood-report-env) | | none |
-| [blast+](https://hub.docker.com/r/staphb/blast/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/blast)](https://hub.docker.com/r/staphb/blast) | - [2.13.0](blast/2.13.0/)
- [2.14.0](blast/2.14.0/)
- [2.14.1](blast/2.14.1/)
- [2.15.0](blast/2.15.0/)
| https://www.ncbi.nlm.nih.gov/books/NBK279690/ |
-| [bowtie2](https://hub.docker.com/r/staphb/bowtie2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bowtie2)](https://hub.docker.com/r/staphb/bowtie2) | - [2.4.4](./bowtie2/2.4.4/)
- [2.4.5](./bowtie2/2.4.5/)
- [2.5.1](./bowtie2/2.5.1/)
- [2.5.3](./bowtie2/2.5.3/)
| http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml
https://github.com/BenLangmead/bowtie2 |
+| [blast+](https://hub.docker.com/r/staphb/blast/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/blast)](https://hub.docker.com/r/staphb/blast) | - [2.13.0](blast/2.13.0/)
- [2.14.0](blast/2.14.0/)
- [2.14.1](blast/2.14.1/)
- [2.15.0](blast/2.15.0/)
- [2.16.0](./blast/2.16.0/)
| https://www.ncbi.nlm.nih.gov/books/NBK279690/ |
+| [bowtie2](https://hub.docker.com/r/staphb/bowtie2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bowtie2)](https://hub.docker.com/r/staphb/bowtie2) | - [2.4.4](./bowtie2/2.4.4/)
- [2.4.5](./bowtie2/2.4.5/)
- [2.5.1](./bowtie2/2.5.1/)
- [2.5.3](./bowtie2/2.5.3/)
- [2.5.4](./bowtie2/2.5.4/)
| http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml
https://github.com/BenLangmead/bowtie2 |
| [Bracken](https://hub.docker.com/r/staphb/bracken/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bracken)](https://hub.docker.com/r/staphb/bracken) | | https://ccb.jhu.edu/software/bracken/index.shtml?t=manual
https://github.com/jenniferlu717/Bracken |
| [BUSCO](https://hub.docker.com/r/staphb/busco/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/busco)](https://hub.docker.com/r/staphb/busco) | - [5.4.7](./busco/5.4.7/)
- [5.6.1](./busco/5.6.1/)
- [5.6.1-prok-bacteria_odb10_2024-01-08](./busco/5.6.1-prok-bacteria_odb10_2024-01-08/)
- [5.7.1](./busco/5.7.1/)
- [5.7.1-prok-bacteria_odb10_2024-01-08](./busco/5.7.1-prok-bacteria_odb10_2024-01-08/)
| https://busco.ezlab.org/busco_userguide.html
https://gitlab.com/ezlab/busco |
| [BWA](https://hub.docker.com/r/staphb/bwa)
[![docker pulls](https://badgen.net/docker/pulls/staphb/bwa)](https://hub.docker.com/r/staphb/bwa) | - 0.7.17
- [0.7.18](./bwa/0.7.18/)
| https://github.com/lh3/bwa |
@@ -133,21 +138,21 @@ To learn more about the docker pull rate limits and the open source software pro
| [centroid](https://hub.docker.com/r/staphb/centroid/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/centroid)](https://hub.docker.com/r/staphb/centroid) | | https://github.com/stjacqrm/centroid |
| [CDC-SPN](https://hub.docker.com/r/staphb/cdc-spn/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/cdc-spn)](https://hub.docker.com/r/staphb/cdc-spn) | | https://github.com/BenJamesMetcalf/Spn_Scripts_Reference |
| [cfsan-snp-pipeline](https://hub.docker.com/r/staphb/cfsan-snp-pipeline)
[![docker pulls](https://badgen.net/docker/pulls/staphb/cfsan-snp-pipeline)](https://hub.docker.com/r/staphb/cfsan-snp-pipeline) | | https://github.com/CFSAN-Biostatistics/snp-pipeline |
-| [CheckM](https://hub.docker.com/r/staphb/checkm)
[![docker pulls](https://badgen.net/docker/pulls/staphb/checkm)](https://hub.docker.com/r/staphb/checkm) | | https://github.com/Ecogenomics/CheckM |
+| [CheckM](https://hub.docker.com/r/staphb/checkm)
[![docker pulls](https://badgen.net/docker/pulls/staphb/checkm)](https://hub.docker.com/r/staphb/checkm) | - [1.2.2](./checkm/1.2.2/)
- [1.2.3](./checkm/1.2.3/)
| https://github.com/Ecogenomics/CheckM |
| [Circlator](https://hub.docker.com/r/staphb/circlator)
[![docker pulls](https://badgen.net/docker/pulls/staphb/circlator)](https://hub.docker.com/r/staphb/circlator) | | https://github.com/sanger-pathogens/circlator |
| [Circos](https://hub.docker.com/r/staphb/circos)
[![docker pulls](https://badgen.net/docker/pulls/staphb/circos)](https://hub.docker.com/r/staphb/circos) | - [0.69-9](./circos/0.69.9/)
| https://circos.ca/ |
| [CirculoCov](https://hub.docker.com/r/staphb/circulocov)
[![docker pulls](https://badgen.net/docker/pulls/staphb/circulocov)](https://hub.docker.com/r/staphb/circulocov) | - [0.1.20240104](./circulocov/0.1.20240104/)
| https://github.com/erinyoung/CirculoCov |
-| [Clair3](https://hub.docker.com/r/staphb/clair3)
[![docker pulls](https://badgen.net/docker/pulls/staphb/clair3)](https://hub.docker.com/r/staphb/clair3) | | https://github.com/HKU-BAL/Clair3 |
+| [Clair3](https://hub.docker.com/r/staphb/clair3)
[![docker pulls](https://badgen.net/docker/pulls/staphb/clair3)](https://hub.docker.com/r/staphb/clair3) | - [1.0.9](./clair3/1.0.9/)
- [1.0.10](./clair3/1.0.10/)
| https://github.com/HKU-BAL/Clair3 |
| [Clustalo](https://hub.docker.com/r/staphb/clustalo)
[![docker pulls](https://badgen.net/docker/pulls/staphb/clustalo)](https://hub.docker.com/r/staphb/clustalo) | | http://www.clustal.org/omega/ |
| [colorid](https://hub.docker.com/r/staphb/colorid)
[![docker pulls](https://badgen.net/docker/pulls/staphb/colorid)](https://hub.docker.com/r/staphb/colorid) | | https://github.com/hcdenbakker/colorid |
| [cutshaw-report-env](https://hub.docker.com/r/staphb/cutshaw-report-env)
[![docker pulls](https://badgen.net/docker/pulls/staphb/cutshaw-report-env)](https://hub.docker.com/r/staphb/cutshaw-report-env) | | https://github.com/VADGS/CutShaw |
| [datasets-sars-cov-2](https://github.com/CDCgov/datasets-sars-cov-2)
[![docker pulls](https://badgen.net/docker/pulls/staphb/datasets-sars-cov-2)](https://hub.docker.com/r/staphb/datasets-sars-cov-2) | | https://github.com/CDCgov/datasets-sars-cov-2 |
| [diamond](https://github.com/bbuchfink/diamond)
[![docker pulls](https://badgen.net/docker/pulls/staphb/diamond)](https://hub.docker.com/r/staphb/diamond) | | https://github.com/bbuchfink/diamond|
-| [dnaapler](https://hub.docker.com/r/staphb/dnaapler)
[![docker pulls](https://badgen.net/docker/pulls/staphb/dnaapler)](https://hub.docker.com/r/staphb/dnaapler) | - [0.4.0](dnaapler/0.4.0/)
- [0.5.0](dnaapler/0.5.0/)
- [0.5.1](dnaapler/0.5.1/)
- [0.7.0](dnaapler/0.7.0/)
| https://github.com/gbouras13/dnaapler |
+| [dnaapler](https://hub.docker.com/r/staphb/dnaapler)
[![docker pulls](https://badgen.net/docker/pulls/staphb/dnaapler)](https://hub.docker.com/r/staphb/dnaapler) | - [0.4.0](dnaapler/0.4.0/)
- [0.5.0](dnaapler/0.5.0/)
- [0.5.1](dnaapler/0.5.1/)
- [0.7.0](dnaapler/0.7.0/)
- [0.8.0](dnaapler/0.8.0/)
| https://github.com/gbouras13/dnaapler |
| [dragonflye](https://hub.docker.com/r/staphb/dragonflye)
[![docker pulls](https://badgen.net/docker/pulls/staphb/dragonflye)](https://hub.docker.com/r/staphb/dragonflye) | - [1.0.14](./dragonflye/1.0.14/)
- [1.1.1](./dragonflye/1.1.1/)
- [1.1.2](./dragonflye/1.1.2/)
- [1.2.0](./dragonflye/1.2.0/)
- [1.2.1](./dragonflye/1.2.1/)
| https://github.com/rpetit3/dragonflye |
| [Dr. PRG ](https://hub.docker.com/r/staphb/drprg)
[![docker pulls](https://badgen.net/docker/pulls/staphb/drprg)](https://hub.docker.com/r/staphb/drprg) | | https://mbh.sh/drprg/ |
-| [DSK](https://hub.docker.com/r/staphb/dsk)
[![docker pulls](https://badgen.net/docker/pulls/staphb/dsk)](https://hub.docker.com/r/staphb/dsk) | - [0.0.100](./dsk/0.0.100/)
- [2.3.3](./dsk/2.3.3/)
/ul> | https://gatb.inria.fr/software/dsk/ |
-| [el_gato](https://hub.docker.com/r/staphb/elgato)
[![docker pulls](https://badgen.net/docker/pulls/staphb/elgato)](https://hub.docker.com/r/staphb/elgato) | - [1.15.2](./elgato/1.15.2)
| https://github.com/appliedbinf/el_gato |
+| [DSK](https://hub.docker.com/r/staphb/dsk)
[![docker pulls](https://badgen.net/docker/pulls/staphb/dsk)](https://hub.docker.com/r/staphb/dsk) | - [0.0.100](./dsk/0.0.100/)
- [2.3.3](./dsk/2.3.3/)
| https://gatb.inria.fr/software/dsk/ |
+| [el_gato](https://hub.docker.com/r/staphb/elgato)
[![docker pulls](https://badgen.net/docker/pulls/staphb/elgato)](https://hub.docker.com/r/staphb/elgato) | - [1.15.2](./elgato/1.15.2)
- [1.18.2](./elgato/1.18.2)
- [1.19.0](./elgato/1.19.0)
| https://github.com/appliedbinf/el_gato |
| [emboss](https://hub.docker.com/r/staphb/emboss)
[![docker pulls](https://badgen.net/docker/pulls/staphb/emboss)](https://hub.docker.com/r/staphb/emboss) | | http://emboss.sourceforge.net |
| [emmtyper](https://hub.docker.com/r/staphb/emmtyper)
[![docker pulls](https://badgen.net/docker/pulls/staphb/emmtyper)](https://hub.docker.com/r/staphb/emmtyper) | | https://github.com/MDU-PHL/emmtyper |
| [emm-typing-tool](https://hub.docker.com/r/staphb/emm-typing-tool)
[![docker pulls](https://badgen.net/docker/pulls/staphb/emm-typing-tool)](https://hub.docker.com/r/staphb/emm-typing-tool) | | https://github.com/phe-bioinformatics/emm-typing-tool |
@@ -165,6 +170,7 @@ To learn more about the docker pull rate limits and the open source software pro
| [Freyja](https://hub.docker.com/r/staphb/freyja)
[![docker pulls](https://badgen.net/docker/pulls/staphb/freyja)](https://hub.docker.com/r/staphb/freyja) | - [1.2](./freyja/1.2/)
- [1.2.1](./freyja/1.2.1/)
- [1.3.1](./freyja/1.3.1/)
- [1.3.2](./freyja/1.3.2/)
- [1.3.4](./freyja/1.3.4/)
- [1.3.7](./freyja/1.3.7/)
- [1.3.8](./freyja/1.3.8/)
- [1.3.9](./freyja/1.3.9/)
- [1.3.10](./freyja/1.3.10/)
- [1.3.11](./freyja/1.3.11/)
- [1.3.12](./freyja/1.3.12/)
- [1.4.2](./freyja/1.4.2/)
- [1.4.3](freyja/1.4.3/)
- [1.4.4](freyja/1.4.4/)
- [1.4.5](freyja/1.4.5/)
- [1.4.7](freyja/1.4.7/)
- [1.4.8](freyja/1.4.8/)
- [1.4.9](freyja/1.4.9/)
- [1.5.0](freyja/1.5.0/)
- [1.5.1](freyja/1.5.1/)
| https://github.com/andersen-lab/Freyja |
| [GAMBIT](https://hub.docker.com/r/staphb/gambit)
[![docker pulls](https://badgen.net/docker/pulls/staphb/gambit)](https://hub.docker.com/r/staphb/gambit) | | https://github.com/jlumpe/gambit |
| [GAMMA](https://hub.docker.com/r/staphb/gamma)
[![docker pulls](https://badgen.net/docker/pulls/staphb/gamma)](https://hub.docker.com/r/staphb/gamma) | | https://github.com/rastanton/GAMMA/ |
+| [GenoFLU](https://hub.docker.com/r/staphb/genoflu)
[![docker pulls](https://badgen.net/docker/pulls/staphb/genoflu)](https://hub.docker.com/r/staphb/genoflu) | | https://github.com/USDA-VS/GenoFLU |
| [geNomad](https://hub.docker.com/r/staphb/genomad)
[![docker pulls](https://badgen.net/docker/pulls/staphb/genomad)](https://hub.docker.com/r/staphb/genomad) | - [1.7.4](./genomad/1.7.4/)
- [1.8.0](./genomad/1.8.0/)
| https://github.com/apcamargo/genomad |
| [GenoVi](https://hub.docker.com/r/staphb/genovi)
[![docker pulls](https://badgen.net/docker/pulls/staphb/genovi)](https://hub.docker.com/r/staphb/genovi) | - [0.2.16](./genovi/0.2.16/)
| https://github.com/robotoD/GenoVi |
| [gfastats](https://hub.docker.com/r/staphb/gfastats)
[![docker pulls](https://badgen.net/docker/pulls/staphb/gfastats)](https://hub.docker.com/r/staphb/gfastats) | - [1.3.6](./gfastats/1.3.6/)
- [1.3.7](./gfastats/1.3.7/)
| https://github.com/vgl-hub/gfastats |
@@ -172,13 +178,13 @@ To learn more about the docker pull rate limits and the open source software pro
| [heatcluster](https://hub.docker.com/r/staphb/heatcluster)
[![docker pulls](https://badgen.net/docker/pulls/staphb/heatcluster)](https://hub.docker.com/r/staphb/heatcluster) | - [1.0.2c](./heatcluster/1.0.2c/)
| https://github.com/DrB-S/heatcluster/tree/main |
| [hmmer](https://hub.docker.com/r/staphb/hmmer)
[![docker pulls](https://badgen.net/docker/pulls/staphb/hmmer)](https://hub.docker.com/r/staphb/hmmer) | - [3.3](hmmer/3.3/)
- [3.3.2](hmmer/3.3.2/)
- [3.4](./hmmer/3.4/)
| http://hmmer.org/ |
| [homopolish](https://hub.docker.com/r/staphb/homopolish)
[![docker pulls](https://badgen.net/docker/pulls/staphb/homopolish)](https://hub.docker.com/r/staphb/homopolish) | | https://github.com/ythuang0522/homopolish/ |
-| [htslib](https://hub.docker.com/r/staphb/htslib)
[![docker pulls](https://badgen.net/docker/pulls/staphb/htslib)](https://hub.docker.com/r/staphb/htslib) | - [1.14](./htslib/1.14)
- [1.15](./htslib/1.15)
- [1.16](./htslib/1.16)
- [1.17](./htslib/1.17)
- [1.18](./htslib/1.18/)
- [1.19](./htslib/1.19/)
- [1.20](./htslib/1.20/)
| https://www.htslib.org/ |
+| [htslib](https://hub.docker.com/r/staphb/htslib)
[![docker pulls](https://badgen.net/docker/pulls/staphb/htslib)](https://hub.docker.com/r/staphb/htslib) | - [1.14](./htslib/1.14)
- [1.15](./htslib/1.15)
- [1.16](./htslib/1.16)
- [1.17](./htslib/1.17)
- [1.18](./htslib/1.18/)
- [1.19](./htslib/1.19/)
- [1.20](./htslib/1.20/)
- [1.20.c](./htslib/1.20.c/)
| https://www.htslib.org/ |
| [iqtree](https://hub.docker.com/r/staphb/iqtree/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/iqtree)](https://hub.docker.com/r/staphb/iqtree) | | http://www.iqtree.org/ |
| [iqtree2](https://hub.docker.com/r/staphb/iqtree2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/iqtree2)](https://hub.docker.com/r/staphb/iqtree2) | - 2.1.2
- 2.2.2.2
- [2.2.2.6](iqtree2/2.2.2.6/)
- [2.2.2.7](iqtree2/2.2.2.7/)
- [2.3.1](iqtree2/2.3.1/)
- [2.3.4](iqtree2/2.3.4/)
| http://www.iqtree.org/ |
| [IPA](https://hub.docker.com/r/staphb/pbipa)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pbipa)](https://hub.docker.com/r/staphb/pbipa) | | https://github.com/PacificBiosciences/pbipa |
| [IRMA](https://hub.docker.com/r/staphb/irma/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/irma)](https://hub.docker.com/r/staphb/irma) | - 1.0.2
- 1.0.3
- 1.1.2
- 1.1.3
- [1.1.4](./irma/1.1.4/)
| https://wonder.cdc.gov/amd/flu/irma/|
| [isPcr](https://users.soe.ucsc.edu/~kent/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ispcr)](https://hub.docker.com/r/staphb/ispcr) | | https://users.soe.ucsc.edu/~kent/ |
-| [iVar](https://hub.docker.com/r/staphb/ivar/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ivar)](https://hub.docker.com/r/staphb/ivar) | - 1.1
- 1.1 (+SARS-CoV2 reference)
- 1.2.1
- 1.2.1 (+SC2 ref)
- 1.2.2 (+SC2 ref and artic bedfiles)
- 1.3
- 1.3.1
- 1.3.2
- 1.4.1
- 1.4.2
| https://github.com/andersen-lab/ivar |
+| [iVar](https://hub.docker.com/r/staphb/ivar/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ivar)](https://hub.docker.com/r/staphb/ivar) | - [1.1](./ivar/1.1/)
- [1.1 (+SARS-CoV2 reference)](./ivar/1.1-SC2/)
- [1.2.1](./ivar/1.2.1/)
- [1.2.1 (+SC2 ref)](./ivar/1.2.1-SC2/)
- [1.2.2 (+SC2 ref and artic bedfiles)](./ivar/1.2.2_artic20200528/)
- [1.3](./ivar/1.3/)
- [1.3.1](./ivar/1.3.1/)
- [1.3.2](./ivar/1.3.2/)
- [1.4.1](./ivar/1.4.1/)
- [1.4.2](./ivar/1.4.2/)
- [1.4.3](./ivar/1.4.3/)
| https://github.com/andersen-lab/ivar |
| [Jasmine](https://hub.docker.com/r/staphb/pbjasmine/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pbjasmine)](https://hub.docker.com/r/staphb/pbjasmine) | - [2.0.0](./pbjasmine/2.0.0/)
| https://github.com/PacificBiosciences/jasmine |
| [Kaptive](https://hub.docker.com/r/staphb/kaptive/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/kaptive)](https://hub.docker.com/r/staphb/kaptive) | - [2.0.0](./kaptive/2.0.0/)
- [2.0.3](./kaptive/2.0.3/)
- [2.0.5](./kaptive/2.0.5/)
- [2.0.8](./kaptive/2.0.8/)
| https://github.com/klebgenomics/Kaptive |
| [Kleborate](https://hub.docker.com/r/staphb/kleborate/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/kleborate)](https://hub.docker.com/r/staphb/kleborate) | - [2.0.4](./kleborate/2.0.4/)
- [2.1.0](./kleborate/2.1.0/)
- [2.2.0](./kleborate/2.3.2/)
- [2.3.2](./kleborate/2.3.2)
- [2.3.2-2023-05](kleborate/2.3.2-2023-05/)
- [2.4.1](kleborate/2.4.1/)
| https://github.com/katholt/Kleborate/
https://github.com/katholt/Kaptive/ |
@@ -199,27 +205,29 @@ To learn more about the docker pull rate limits and the open source software pro
| [mashtree](https://hub.docker.com/r/staphb/mashtree)
[![docker pulls](https://badgen.net/docker/pulls/staphb/mashtree)](https://hub.docker.com/r/staphb/mashtree) | - [0.52.0](./mashtree/0.52.0/)
- [0.57.0](./mashtree/0.57.0/)
- [1.0.4](./mashtree/1.0.4/)
- [1.2.0](./mashtree/1.2.0/)
- [1.4.6](./mashtree/1.4.6/)
| https://github.com/lskatz/mashtree |
| [MaSuRCA](https://hub.docker.com/r/staphb/masurca)
[![docker pulls](https://badgen.net/docker/pulls/staphb/masurca)](https://hub.docker.com/r/staphb/masurca) | | https://github.com/alekseyzimin/masurca |
| [medaka](https://hub.docker.com/r/staphb/medaka)
[![docker pulls](https://badgen.net/docker/pulls/staphb/medaka)](https://hub.docker.com/r/staphb/medaka) | | https://github.com/nanoporetech/medaka |
-| [metaphlan](https://hub.docker.com/r/staphb/metaphlan)
[![docker pulls](https://badgen.net/docker/pulls/staphb/metaphlan)](https://hub.docker.com/r/staphb/metaphlan) | - 3.0.3-no-db (no database)
- 3.0.3 (~3GB db)
- [4.1.0](./metaphlan/4.1.0/) (no database)
| https://github.com/biobakery/MetaPhlAn |
+| [metaphlan](https://hub.docker.com/r/staphb/metaphlan)
[![docker pulls](https://badgen.net/docker/pulls/staphb/metaphlan)](https://hub.docker.com/r/staphb/metaphlan) | - [3.0.3-no-db (no database)](./metaphlan/3.0.3-no-db/)
- [3.0.3 (~3GB db)](./metaphlan/3.0.3/)
- [4.1.0](./metaphlan/4.1.0/) (no database)
- [4.1.1](./metaphlan/4.1.1/) (no database)
| https://github.com/biobakery/MetaPhlAn |
| [MIDAS](https://hub.docker.com/r/staphb/midas)
[![docker pulls](https://badgen.net/docker/pulls/staphb/midas)](https://hub.docker.com/r/staphb/midas) | - 1.3.2 (no database)
| https://github.com/snayfach/MIDAS |
| [minimap2](https://hub.docker.com/r/staphb/minimap2)
[![docker pulls](https://badgen.net/docker/pulls/staphb/minimap2)](https://hub.docker.com/r/staphb/minimap2) | - 2.17
- 2.18
- 2.21
- 2.22
- 2.23
- 2.24
- 2.25
- [2.26](./minimap2/2.26)
- [2.27](./minimap2/2.27/)
- [2.28](./minimap2/2.28/)
| https://github.com/lh3/minimap2 |
| [minipolish](https://hub.docker.com/r/staphb/minipolish)
[![docker pulls](https://badgen.net/docker/pulls/staphb/minipolish)](https://hub.docker.com/r/staphb/minipolish) | | https://github.com/rrwick/Minipolish |
| [mlst](https://hub.docker.com/r/staphb/mlst)
[![docker pulls](https://badgen.net/docker/pulls/staphb/mlst)](https://hub.docker.com/r/staphb/mlst) | - 2.16.2
- 2.17.6
- 2.19.0
- 2.22.0
- 2.22.1
- 2.23.0
- [2.23.0-2023-07 (databases updated July 2023)](mlst/2.23.0-2023-07/)
- [2.23.0-2023-08 (databases updated Aug 2023)](mlst/2.23.0-2023-08/)
- [2.23.0-2024-01 (databases updated Jan 2024)](mlst/2.23.0-2024-01/)
- [2.23.0-2024-03 (databases updated March 2024)](mlst/2.23.0-2024-03/)
| https://github.com/tseemann/mlst |
| [Mugsy](https://hub.docker.com/r/staphb/mugsy)
[![docker pulls](https://badgen.net/docker/pulls/staphb/mugsy)](https://hub.docker.com/r/staphb/mugsy) | | http://mugsy.sourceforge.net/ |
-| [MultiQC](https://hub.docker.com/r/staphb/multiqc)
[![docker pulls](https://badgen.net/docker/pulls/staphb/multiqc)](https://hub.docker.com/r/staphb/multiqc) | - [1.7](./multiqc/1.7/)
- [1.8](./multiqc/1.8/)
- [1.18](./multiqc/1.18/)
- [1.19](./multiqc/1.19/)
- [1.22.2](./multiqc/1.22.2/)
| https://github.com/MultiQC/MultiQC |
+| [MultiQC](https://hub.docker.com/r/staphb/multiqc)
[![docker pulls](https://badgen.net/docker/pulls/staphb/multiqc)](https://hub.docker.com/r/staphb/multiqc) | - [1.7](./multiqc/1.7/)
- [1.8](./multiqc/1.8/)
- [1.18](./multiqc/1.18/)
- [1.19](./multiqc/1.19/)
- [1.22.2](./multiqc/1.22.2/)
- [1.22.3](./multiqc/1.22.3/)
| https://github.com/MultiQC/MultiQC |
| [Mummer](https://hub.docker.com/r/staphb/mummer)
[![docker pulls](https://badgen.net/docker/pulls/staphb/mummer)](https://hub.docker.com/r/staphb/mummer) | - 4.0.0
- 4.0.0 + RGDv2
- 4.0.0 + RGDv2 + gnuplot
| https://github.com/mummer4/mummer |
| [Mykrobe + Genotyphi + sonneityping](https://hub.docker.com/r/staphb/mykrobe)
[![docker pulls](https://badgen.net/docker/pulls/staphb/mykrobe)](https://hub.docker.com/r/staphb/mykrobe) | - 0.11.0 (Mykrobe) & 1.9.1 (Genotyphi)
- 0.12.1 (Mykrobe) & 1.9.1 (Genotyphi) & v20210201 (sonneityping)
- 0.12.1 (Mykrobe) & 2.0 (Genotyphi) & v20210201 (sonneityping)
- [0.12.2 (Mykrobe) & 2.0 (Genotyphi) & v20210201 (sonneityping)](mykrobe/0.12.2/)
- [0.13.0](./mykrobe/0.13.0)
| https://github.com/Mykrobe-tools/mykrobe
https://github.com/typhoidgenomics/genotyphi
https://github.com/katholt/sonneityping |
| [NanoPlot](https://hub.docker.com/r/staphb/nanoplot)
[![docker pulls](https://badgen.net/docker/pulls/staphb/nanoplot)](https://hub.docker.com/r/staphb/nanoplot) | - [1.27.0](./nanoplot/1.27.0/)
- [1.29.0](./nanoplot/1.29.0/)
- [1.30.1](./nanoplot/1.30.1/)
- [1.32.0](./nanoplot/1.32.0/)
- [1.33.0](./nanoplot/1.33.0/)
- [1.40.0](./nanoplot/1.40.0/)
- [1.41.6](./nanoplot/1.41.6/)
- [1.42.0](./nanoplot/1.42.0/)
| https://github.com/wdecoster/NanoPlot |
| [ngmaster](https://hub.docker.com/r/staphb/ngmaster)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ngmaster)](https://hub.docker.com/r/staphb/ngmaster) | | https://github.com/MDU-PHL/ngmaster |
-| [NCBI Datasets](https://hub.docker.com/r/staphb/ncbi-datasets)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-datasets)](https://hub.docker.com/r/staphb/ncbi-datasets) | Click to see all datasets versions
**datasets versions** - [13.31.0](./ncbi-datasets/13.31.0/)
- [13.35.0](./ncbi-datasets/13.35.0/)
- [13.43.2](./ncbi-datasets/13.43.2/)
- [14.0.0](./ncbi-datasets/14.0.0/)
- [14.3.0](./ncbi-datasets/14.3.0/)
- [14.7.0](./ncbi-datasets/14.7.0/)
- [14.13.2](./ncbi-datasets/14.13.2/)
- [14.20.0](./ncbi-datasets/14.20.0/)
- [14.27.0](ncbi-datasets/14.27.0/)
- [15.1.0](ncbi-datasets/15.1.0/)
- [15.2.0](ncbi-datasets/15.2.0/)
- [15.11.0](ncbi-datasets/15.11.0/)
- [15.27.1](ncbi-datasets/15.27.1/)
- [15.31.0](ncbi-datasets/15.31.1/)
- [16.2.0](ncbi-datasets/16.2.0/)
- [16.8.1](./ncbi-datasets/16.8.1/)
- [16.10.3](./ncbi-datasets/16.10.3/)
- [16.15.0](./ncbi-datasets/16.15.0/)
| [https://github.com/ncbi/datasets](https://github.com/ncbi/datasets)
[https://www.ncbi.nlm.nih.gov/datasets/docs/v1/](https://www.ncbi.nlm.nih.gov/datasets/docs/v1/) |
-| [NCBI AMRFinderPlus](https://hub.docker.com/r/staphb/ncbi-amrfinderplus)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-amrfinderplus)](https://hub.docker.com/r/staphb/ncbi-amrfinderplus) | **AMRFinderPlus & database verion** Click to see AMRFinderplus v3.11.4 and older versions!
- 3.1.1b
- 3.8.4
- 3.8.28
- 3.9.3
- 3.9.8
- 3.10.1
- 3.10.5
- 3.10.16
- 3.10.20
- 3.10.24
- 3.10.30
- 3.10.36
- 3.10.42
- 3.11.2 & 2022-12-19.1
- [3.11.2 & 2023-02-23.1](ncbi-amrfinderplus/3.11.2-2023-02-23.1/)
- [3.11.4 & 2023-02-23.1](ncbi-amrfinderplus/3.11.4-2023-02-23.1/)
- [3.11.8 & 2023-02-23.1](ncbi-amrfinderplus/3.11.8-2023-02-23.1/)
- [3.11.11 & 2023-04-17.1](ncbi-amrfinderplus/3.11.11-2023-04-17.1)
- [3.11.14 & 2023-04-17.1](ncbi-amrfinderplus/3.11.14-2023-04-17.1/)
- [3.11.17 & 2023-07-13.2](ncbi-amrfinderplus/3.11.17-2023-07-13.2/)
- [3.11.18 & 2023-08-08.2](ncbi-amrfinderplus/3.11.18-2023-08-08.2/)
- [3.11.20 & 2023-09-26.1](ncbi-amrfinderplus/3.11.20-2023-09-26.1/)
- [3.11.26 & 2023-11-15.1](ncbi-amrfinderplus/3.11.26-2023-11-15.1/)
- [3.12.8 & 2024-01-31.1](ncbi-amrfinderplus/3.12.8-2024-01-31.1/)
- [3.12.8 & 2024-05-02.2](./ncbi-amrfinderplus/3.12.8-2024-05-02.2/)
| [https://github.com/ncbi/amr](https://github.com/ncbi/amr) |
-| [NCBI table2asn](https://hub.docker.com/r/staphb/ncbi-table2asn)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-table2asn)](https://hub.docker.com/r/staphb/ncbi-table2asn) | - [1.26.678](./ncbi-table2asn/1.26.678/)
- [1.28.943](./ncbi-table2asn/1.28.943/)
| [https://www.ncbi.nlm.nih.gov/genbank/table2asn/](https://www.ncbi.nlm.nih.gov/genbank/table2asn/)
[https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/2022-06-14/by_program/table2asn/](https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/2022-06-14/by_program/table2asn/) |
+| [NCBI Datasets](https://hub.docker.com/r/staphb/ncbi-datasets)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-datasets)](https://hub.docker.com/r/staphb/ncbi-datasets) | Click to see all datasets versions
**datasets versions** - [13.31.0](./ncbi-datasets/13.31.0/)
- [13.35.0](./ncbi-datasets/13.35.0/)
- [13.43.2](./ncbi-datasets/13.43.2/)
- [14.0.0](./ncbi-datasets/14.0.0/)
- [14.3.0](./ncbi-datasets/14.3.0/)
- [14.7.0](./ncbi-datasets/14.7.0/)
- [14.13.2](./ncbi-datasets/14.13.2/)
- [14.20.0](./ncbi-datasets/14.20.0/)
- [14.27.0](ncbi-datasets/14.27.0/)
- [15.1.0](ncbi-datasets/15.1.0/)
- [15.2.0](ncbi-datasets/15.2.0/)
- [15.11.0](ncbi-datasets/15.11.0/)
- [15.27.1](ncbi-datasets/15.27.1/)
- [15.31.1](ncbi-datasets/15.31.1/)
- [16.2.0](ncbi-datasets/16.2.0/)
- [16.8.1](./ncbi-datasets/16.8.1/)
- [16.10.3](./ncbi-datasets/16.10.3/)
- [16.15.0](./ncbi-datasets/16.15.0/)
- [16.22.1](./ncbi-datasets/16.22.1/)
| [https://github.com/ncbi/datasets](https://github.com/ncbi/datasets)
[https://www.ncbi.nlm.nih.gov/datasets/docs/v1/](https://www.ncbi.nlm.nih.gov/datasets/docs/v1/) |
+| [NCBI AMRFinderPlus](https://hub.docker.com/r/staphb/ncbi-amrfinderplus)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-amrfinderplus)](https://hub.docker.com/r/staphb/ncbi-amrfinderplus) | **AMRFinderPlus & database version** Click to see AMRFinderplus v3.11.4 and older versions!
- 3.1.1b
- 3.8.4
- 3.8.28
- 3.9.3
- 3.9.8
- 3.10.1
- 3.10.5
- 3.10.16
- 3.10.20
- 3.10.24
- 3.10.30
- 3.10.36
- 3.10.42
- 3.11.2 & 2022-12-19.1
- [3.11.2 & 2023-02-23.1](ncbi-amrfinderplus/3.11.2-2023-02-23.1/)
- [3.11.4 & 2023-02-23.1](ncbi-amrfinderplus/3.11.4-2023-02-23.1/)
- [3.11.8 & 2023-02-23.1](ncbi-amrfinderplus/3.11.8-2023-02-23.1/)
- [3.11.11 & 2023-04-17.1](ncbi-amrfinderplus/3.11.11-2023-04-17.1)
- [3.11.14 & 2023-04-17.1](ncbi-amrfinderplus/3.11.14-2023-04-17.1/)
- [3.11.17 & 2023-07-13.2](ncbi-amrfinderplus/3.11.17-2023-07-13.2/)
- [3.11.18 & 2023-08-08.2](ncbi-amrfinderplus/3.11.18-2023-08-08.2/)
- [3.11.20 & 2023-09-26.1](ncbi-amrfinderplus/3.11.20-2023-09-26.1/)
- [3.11.26 & 2023-11-15.1](ncbi-amrfinderplus/3.11.26-2023-11-15.1/)
- [3.12.8 & 2024-01-31.1](ncbi-amrfinderplus/3.12.8-2024-01-31.1/)
- [3.12.8 & 2024-05-02.2](./ncbi-amrfinderplus/3.12.8-2024-05-02.2/)
- [3.12.8 & 2024-07-22.1](./ncbi-amrfinderplus/3.12.8-2024-07-22.1/)
| [https://github.com/ncbi/amr](https://github.com/ncbi/amr) |
+| [NCBI table2asn](https://hub.docker.com/r/staphb/ncbi-table2asn)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ncbi-table2asn)](https://hub.docker.com/r/staphb/ncbi-table2asn) | - [1.26.678](./ncbi-table2asn/1.26.678/)
- [1.28.943](./ncbi-table2asn/1.28.943/)
- [1.28.1021](./ncbi-table2asn/1.28.1021/)
| [https://www.ncbi.nlm.nih.gov/genbank/table2asn/](https://www.ncbi.nlm.nih.gov/genbank/table2asn/)
[https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/2022-06-14/by_program/table2asn/](https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/2022-06-14/by_program/table2asn/) |
| [ONTime](https://hub.docker.com/r/staphb/ontime)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ontime)](https://hub.docker.com/r/staphb/ontime) | - [0.2.3](ontime/0.2.3/)
- [0.3.1](ontime/0.3.1/)
| https://github.com/mbhall88/ontime |
| [OrthoFinder](https://hub.docker.com/r/staphb/orthofinder)
[![docker pulls](https://badgen.net/docker/pulls/staphb/orthofinder)](https://hub.docker.com/r/staphb/orthofinder) | | https://github.com/davidemms/OrthoFinder |
| [Panaroo](https://hub.docker.com/r/staphb/panaroo)
[![docker pulls](https://badgen.net/docker/pulls/staphb/panaroo)](https://hub.docker.com/r/staphb/panaroo) | - [1.2.10](panaroo/1.2.10/)
- [1.3.4](panaroo/1.3.4/)
- [1.5.0](./panaroo/1.5.0/)
| https://github.com/gtonkinhill/panaroo |
-| [Pangolin](https://hub.docker.com/r/staphb/pangolin)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pangolin)](https://hub.docker.com/r/staphb/pangolin) | Click to see Pangolin v4.2 and older versions!
**Pangolin version & pangoLEARN data release date** - 1.1.14
- 2.0.4 & 2020-07-20
- 2.0.5 & 2020-07-20
- 2.1.1 & 2020-12-17
- 2.1.3 & 2020-12-17
- 2.1.6 & 2021-01-06
- 2.1.7 & 2021-01-11
- 2.1.7 & 2021-01-20
- 2.1.8 & 2021-01-22
- 2.1.10 & 2021-02-01
- 2.1.11 & 2021-02-01
- 2.1.11 & 2021-02-05
- 2.2.1 & 2021-02-06
- 2.2.2 & 2021-02-06
- 2.2.2 & 2021-02-11
- 2.2.2 & 2021-02-12
- 2.3.0 & 2021-02-12
- 2.3.0 & 2021-02-18
- 2.3.0 & 2021-02-21
- 2.3.2 & 2021-02-21
- 2.3.3 & 2021-03-16
- 2.3.4 & 2021-03-16
- 2.3.5 & 2021-03-16
- 2.3.6 & 2021-03-16
- 2.3.6 & 2021-03-29
- 2.3.8 & 2021-04-01
- 2.3.8 & 2021-04-14
- 2.3.8 & 2021-04-21
- 2.3.8 & 2021-04-23
- 2.4 & 2021-04-28
- 2.4.1 & 2021-04-28
- 2.4.2 & 2021-04-28
- 2.4.2 & 2021-05-10
- 2.4.2 & 2021-05-11
- 2.4.2 & 2021-05-19
- 3.0.5 & 2021-06-05
- 3.1.3 & 2021-06-15
- 3.1.5 & 2021-06-15
- 3.1.5 & 2021-07-07-2
- 3.1.7 & 2021-07-09
- 3.1.8 & 2021-07-28
- 3.1.10 & 2021-07-28
- 3.1.11 & 2021-08-09
- 3.1.11 & 2021-08-24
- 3.1.11 & 2021-09-17
- 3.1.14 & 2021-09-28
- 3.1.14 & 2021-10-13
- 3.1.16 & 2021-10-18
- 3.1.16 & 2021-11-04
- 3.1.16 & 2021-11-09
- 3.1.16 & 2021-11-18
- 3.1.16 & 2021-11-25
- 3.1.17 & 2021-11-25
- 3.1.17 & 2021-12-06
- 3.1.17 & 2022-01-05
- 3.1.18 & 2022-01-20
- 3.1.19 & 2022-01-20
- 3.1.20 & 2022-02-02
- 3.1.20 & 2022-02-28
**Pangolin version & pangolin-data version** - 4.0 & 1.2.133
- 4.0.1 & 1.2.133
- 4.0.2 & 1.2.133
- 4.0.3 & 1.2.133
- 4.0.4 & 1.2.133
- 4.0.5 & 1.3
- 4.0.6 & 1.6
- 4.0.6 & 1.8
- 4.0.6 & 1.9
- 4.1.1 & 1.11
- 4.1.2 & 1.12
- 4.1.2 & 1.13
- 4.1.2 & 1.14
- 4.1.3 & 1.15.1
- 4.1.3 & 1.16
- 4.1.3 & 1.17
- 4.2 & 1.18
- 4.2 & 1.18.1
- 4.2 & 1.18.1.1
- 4.2 & 1.19
**Pangolin version & pangolin-data version** - [4.3 & 1.20](pangolin/4.3-pdata-1.20/)
- [4.3 & 1.21](pangolin/4.3-pdata-1.21/)
- [4.3.1 & 1.22](pangolin/4.3.1-pdata-1.22/)
- [4.3.1 & 1.23](pangolin/4.3.1-pdata-1.23/)
- [4.3.1 & 1.23.1](pangolin/4.3.1-pdata-1.23.1/)
- [4.3.1 & 1.23.1 with XDG_CACHE_HOME=/tmp](pangolin/4.3.1-pdata-1.23.1-1/)
- [4.3.1 & 1.24](pangolin/4.3.1-pdata-1.24/)
- [4.3.1 & 1.25.1](pangolin/4.3.1-pdata-1.25.1/)
- [4.3.1 & 1.26](pangolin/4.3.1-pdata-1.26/)
- [4.3.1 & 1.27](pangolin/4.3.1-pdata-1.27/)
- [4.3.1 & 1.28](pangolin/4.3.1-pdata-1.28/)
| https://github.com/cov-lineages/pangolin
https://github.com/cov-lineages/pangoLEARN
https://github.com/cov-lineages/pango-designation
https://github.com/cov-lineages/scorpio
https://github.com/cov-lineages/constellations
https://github.com/cov-lineages/lineages (archived)
https://github.com/hCoV-2019/pangolin (archived) |
+| [pango_aliasor](https://hub.docker.com/r/staphb/pango_aliasor)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pango_aliasor)](https://hub.docker.com/r/staphb/pango_aliasor) | - [0.3.0](./pango_aliasor/0.3.0/)
| https://github.com/corneliusroemer/pango_aliasor |
+| [Pangolin](https://hub.docker.com/r/staphb/pangolin)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pangolin)](https://hub.docker.com/r/staphb/pangolin) | Click to see Pangolin v4.2 and older versions!
**Pangolin version & pangoLEARN data release date** - 1.1.14
- 2.0.4 & 2020-07-20
- 2.0.5 & 2020-07-20
- 2.1.1 & 2020-12-17
- 2.1.3 & 2020-12-17
- 2.1.6 & 2021-01-06
- 2.1.7 & 2021-01-11
- 2.1.7 & 2021-01-20
- 2.1.8 & 2021-01-22
- 2.1.10 & 2021-02-01
- 2.1.11 & 2021-02-01
- 2.1.11 & 2021-02-05
- 2.2.1 & 2021-02-06
- 2.2.2 & 2021-02-06
- 2.2.2 & 2021-02-11
- 2.2.2 & 2021-02-12
- 2.3.0 & 2021-02-12
- 2.3.0 & 2021-02-18
- 2.3.0 & 2021-02-21
- 2.3.2 & 2021-02-21
- 2.3.3 & 2021-03-16
- 2.3.4 & 2021-03-16
- 2.3.5 & 2021-03-16
- 2.3.6 & 2021-03-16
- 2.3.6 & 2021-03-29
- 2.3.8 & 2021-04-01
- 2.3.8 & 2021-04-14
- 2.3.8 & 2021-04-21
- 2.3.8 & 2021-04-23
- 2.4 & 2021-04-28
- 2.4.1 & 2021-04-28
- 2.4.2 & 2021-04-28
- 2.4.2 & 2021-05-10
- 2.4.2 & 2021-05-11
- 2.4.2 & 2021-05-19
- 3.0.5 & 2021-06-05
- 3.1.3 & 2021-06-15
- 3.1.5 & 2021-06-15
- 3.1.5 & 2021-07-07-2
- 3.1.7 & 2021-07-09
- 3.1.8 & 2021-07-28
- 3.1.10 & 2021-07-28
- 3.1.11 & 2021-08-09
- 3.1.11 & 2021-08-24
- 3.1.11 & 2021-09-17
- 3.1.14 & 2021-09-28
- 3.1.14 & 2021-10-13
- 3.1.16 & 2021-10-18
- 3.1.16 & 2021-11-04
- 3.1.16 & 2021-11-09
- 3.1.16 & 2021-11-18
- 3.1.16 & 2021-11-25
- 3.1.17 & 2021-11-25
- 3.1.17 & 2021-12-06
- 3.1.17 & 2022-01-05
- 3.1.18 & 2022-01-20
- 3.1.19 & 2022-01-20
- 3.1.20 & 2022-02-02
- 3.1.20 & 2022-02-28
**Pangolin version & pangolin-data version** - 4.0 & 1.2.133
- 4.0.1 & 1.2.133
- 4.0.2 & 1.2.133
- 4.0.3 & 1.2.133
- 4.0.4 & 1.2.133
- 4.0.5 & 1.3
- 4.0.6 & 1.6
- 4.0.6 & 1.8
- 4.0.6 & 1.9
- 4.1.1 & 1.11
- 4.1.2 & 1.12
- 4.1.2 & 1.13
- 4.1.2 & 1.14
- 4.1.3 & 1.15.1
- 4.1.3 & 1.16
- 4.1.3 & 1.17
- 4.2 & 1.18
- 4.2 & 1.18.1
- 4.2 & 1.18.1.1
- 4.2 & 1.19
**Pangolin version & pangolin-data version** - [4.3 & 1.20](pangolin/4.3-pdata-1.20/)
- [4.3 & 1.21](pangolin/4.3-pdata-1.21/)
- [4.3.1 & 1.22](pangolin/4.3.1-pdata-1.22/)
- [4.3.1 & 1.23](pangolin/4.3.1-pdata-1.23/)
- [4.3.1 & 1.23.1](pangolin/4.3.1-pdata-1.23.1/)
- [4.3.1 & 1.23.1 with XDG_CACHE_HOME=/tmp](pangolin/4.3.1-pdata-1.23.1-1/)
- [4.3.1 & 1.24](pangolin/4.3.1-pdata-1.24/)
- [4.3.1 & 1.25.1](pangolin/4.3.1-pdata-1.25.1/)
- [4.3.1 & 1.26](pangolin/4.3.1-pdata-1.26/)
- [4.3.1 & 1.27](pangolin/4.3.1-pdata-1.27/)
- [4.3.1 & 1.28](pangolin/4.3.1-pdata-1.28/)
- [4.3.1 & 1.28.1](pangolin/4.3.1-pdata-1.28.1/)
- [4.3.1 & 1.29](pangolin/4.3.1-pdata-1.29/)
- [4.3.1 & 1.30](pangolin/4.3.1-pdata-1.30/)
| https://github.com/cov-lineages/pangolin
https://github.com/cov-lineages/pangoLEARN
https://github.com/cov-lineages/pango-designation
https://github.com/cov-lineages/scorpio
https://github.com/cov-lineages/constellations
https://github.com/cov-lineages/lineages (archived)
https://github.com/hCoV-2019/pangolin (archived) |
+| [panqc](https://hub.docker.com/r/staphb/panqc)
[![docker pulls](https://badgen.net/docker/pulls/staphb/panqc)](https://hub.docker.com/r/staphb/panqc) | | https://github.com/maxgmarin/panqc/releases/tag/0.4.0 |
| [parallel-perl](https://hub.docker.com/r/staphb/parallel-perl)
[![docker pulls](https://badgen.net/docker/pulls/staphb/parallel-perl)](https://hub.docker.com/r/staphb/parallel-perl) | | https://www.gnu.org/software/parallel |
| [parsnp](https://hub.docker.com/r/staphb/parsnp)
[![docker pulls](https://badgen.net/docker/pulls/staphb/parsnp)](https://hub.docker.com/r/staphb/parsnp) | - [1.5.6](./parsnp/1.5.6/)
- [2.0.4](./parsnp/2.0.4/)
- [2.0.5](./parsnp/2.0.5/)
| https://github.com/marbl/parsnp |
-| [pasty](https://hub.docker.com/r/staphb/pasty)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pasty)](https://hub.docker.com/r/staphb/pasty) | - 1.0.2
- [1.0.3](pasty/1.0.3/)
| https://github.com/rpetit3/pasty |
+| [pasty](https://hub.docker.com/r/staphb/pasty)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pasty)](https://hub.docker.com/r/staphb/pasty) | - 1.0.2
- [1.0.3](pasty/1.0.3/)
- [2.2.1](./pasty/2.2.1/)
| https://github.com/rpetit3/pasty |
| [pbmm2](https://hub.docker.com/r/staphb/pbmm2)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pbmm2)](https://hub.docker.com/r/staphb/pbmm2) | - [1.13.1](./pbmm2/1.13.1/)
| https://github.com/PacificBiosciences/pbmm2 |
| [Pavian](https://hub.docker.com/r/staphb/pavian)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pavian)](https://hub.docker.com/r/staphb/pavian) | | https://github.com/fbreitwieser/pavian |
| [pbptyper](https://hub.docker.com/r/staphb/pbptyper)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pbptyper)](https://hub.docker.com/r/staphb/pbptyper) | | https://github.com/rpetit3/pbptyper |
@@ -238,8 +246,8 @@ To learn more about the docker pull rate limits and the open source software pro
| [Porechop](https://hub.docker.com/r/staphb/porechop/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/porechop)](https://hub.docker.com/r/staphb/porechop) | | https://github.com/rrwick/Porechop |
| [PPanGGOLiN](https://hub.docker.com/r/staphb/ppanggolin/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ppanggolin)](https://hub.docker.com/r/staphb/ppanggolin) | - [1.2.105](./ppanggolin/1.2.105/)
- [2.0.3](./ppanggolin/2.0.3/)
- [2.0.5](./ppanggolin/2.0.5/)
| https://github.com/labgem/PPanGGOLiN |
| [Prokka](https://hub.docker.com/r/staphb/prokka/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/prokka)](https://hub.docker.com/r/staphb/prokka) | | https://github.com/tseemann/prokka |
-| [pyCirclize](https://hub.docker.com/r/staphb/pycirclize/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pycirclize)](https://hub.docker.com/r/staphb/pycirclize) | - [1.0.0](pycirclize/1.0.0/)
- [1.2.0](./pycirclize/1.2.0/)
- [1.5.0](./pycirclize/1.5.0/)
| https://github.com/moshi4/pyCirclize |
-| [pyGenomeViz](https://hub.docker.com/r/staphb/pygenomeviz/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pygenomeviz)](https://hub.docker.com/r/staphb/pygenomeviz) | - 0.2.2
- 0.3.2
- [0.4.2](pygenomeviz/0.4.2/)
- [0.4.3](pygenomeviz/0.4.3/)
- [0.4.4](pygenomeviz/0.4.4/)
| https://github.com/moshi4/pyGenomeViz |
+| [pyCirclize](https://hub.docker.com/r/staphb/pycirclize/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pycirclize)](https://hub.docker.com/r/staphb/pycirclize) | - [1.0.0](pycirclize/1.0.0/)
- [1.2.0](./pycirclize/1.2.0/)
- [1.5.0](./pycirclize/1.5.0/)
- [1.6.0](./pycirclize/1.6.0/)
| https://github.com/moshi4/pyCirclize |
+| [pyGenomeViz](https://hub.docker.com/r/staphb/pygenomeviz/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pygenomeviz)](https://hub.docker.com/r/staphb/pygenomeviz) | - 0.2.2
- 0.3.2
- [0.4.2](pygenomeviz/0.4.2/)
- [0.4.3](pygenomeviz/0.4.3/)
- [0.4.4](pygenomeviz/0.4.4/)
- [1.1.0](pygenomeviz/1.1.0/)
| https://github.com/moshi4/pyGenomeViz |
| [pyMLST](https://hub.docker.com/r/staphb/pymlst/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pymlst)](https://hub.docker.com/r/staphb/pymlst) | - [2.1.5](./pymlst/2.1.5/)
- [2.1.6](./pymlst/2.1.6/)
| https://github.com/bvalot/pyMLST |
| [pypolca](https://hub.docker.com/r/staphb/pypolca/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/pypolca)](https://hub.docker.com/r/staphb/pypolca) | - [0.3.1](./pypolca/0.3.1/)
| https://github.com/gbouras13/pypolca |
| [QUAST](https://hub.docker.com/r/staphb/quast/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/quast)](https://hub.docker.com/r/staphb/quast) | - 5.0.0
- 5.0.2
- [5.2.0](./quast/5.2.0)
- [5.2.0-slim](./quast/5.2.0-slim)
| https://github.com/ablab/quast |
@@ -249,18 +257,20 @@ To learn more about the docker pull rate limits and the open source software pro
| [raven](https://hub.docker.com/r/staphb/raven/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/raven)](https://hub.docker.com/r/staphb/raven) | - 1.5.1
- 1.8.1
- [1.8.3](./raven/1.8.3)
| https://github.com/lbcb-sci/raven |
| [RAxML](https://hub.docker.com/r/staphb/raxml/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/raxml)](https://hub.docker.com/r/staphb/raxml) | - 8.2.12
- [8.2.13](./raxml/8.2.13/)
| https://github.com/stamatak/standard-RAxML |
| [RAxML-NG](https://hub.docker.com/r/staphb/raxml-ng/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/raxml-ng)](https://hub.docker.com/r/staphb/raxml-ng) | - [1.2.2](./raxml-ng/1.2.2/)
| https://github.com/amkozlov/raxml-ng |
+| [rdp](https://hub.docker.com/r/staphb/rdp)
[![docker pulls](https://badgen.net/docker/pulls/staphb/rdp)](https://hub.docker.com/r/staphb/rdp) | | https://sourceforge.net/projects/rdp-classifier/files/rdp-classifier/rdp_classifier_2.14.zip/download |
| [ResFinder](https://hub.docker.com/r/staphb/resfinder/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/resfinder)](https://hub.docker.com/r/staphb/resfinder) | - [4.1.11](./resfinder/4.1.11/)
- [4.5.0](./resfinder/4.5.0/)
| https://bitbucket.org/genomicepidemiology/resfinder/src/master/ |
| [Roary](https://hub.docker.com/r/staphb/roary/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/roary)](https://hub.docker.com/r/staphb/roary) | | https://github.com/sanger-pathogens/Roary |
| [SalmID](https://hub.docker.com/r/staphb/salmid)
[![docker pulls](https://badgen.net/docker/pulls/staphb/salmid)](https://hub.docker.com/r/staphb/salmid) | | https://github.com/hcdenbakker/SalmID |
| [samclip](https://hub.docker.com/r/staphb/samclip)
[![docker pulls](https://badgen.net/docker/pulls/staphb/samclip)](https://hub.docker.com/r/staphb/samclip) | - [0.4.0](./samclip/0.4.0/)
| https://github.com/tseemann/samclip |
-| [Samtools](https://hub.docker.com/r/staphb/samtools)
[![docker pulls](https://badgen.net/docker/pulls/staphb/samtools)](https://hub.docker.com/r/staphb/samtools) | - [1.9](./samtools/1.9/)
- [1.10](./samtools/1.10/)
- [1.11](./samtools/1.11/)
- [1.12](./samtools/1.12/)
- [1.13](./samtools/1.13/)
- [1.14](./samtools/1.14/)
- [1.15](./samtools/1.15/)
- [1.16](./samtools/1.16/)
- [1.16.1](./samtools/1.16.1/)
- [1.17](./samtools/1.17/)
- [1.17-2023-06](./samtools/1.17-2023-06/)
- [1.18](./samtools/1.18/)
- [1.19](./samtools/1.19/)
- [1.20](./samtools/1.20/)
| https://github.com/samtools/samtools |
-| [SeqKit](https://hub.docker.com/r/staphb/SeqKit)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqkit)](https://hub.docker.com/r/staphb/seqkit) | - [2.3.1](./seqkit/2.3.1/)
- [2.6.1](./seqkit/2.6.1/)
- [2.7.0](./seqkit/2.7.0/)
- [2.8.0](./seqkit/2.8.0/)
- [2.8.1](./seqkit/2.8.1/)
| https://github.com/shenwei356/seqkit |
+| [Samtools](https://hub.docker.com/r/staphb/samtools)
[![docker pulls](https://badgen.net/docker/pulls/staphb/samtools)](https://hub.docker.com/r/staphb/samtools) | - [1.9](./samtools/1.9/)
- [1.10](./samtools/1.10/)
- [1.11](./samtools/1.11/)
- [1.12](./samtools/1.12/)
- [1.13](./samtools/1.13/)
- [1.14](./samtools/1.14/)
- [1.15](./samtools/1.15/)
- [1.16](./samtools/1.16/)
- [1.16.1](./samtools/1.16.1/)
- [1.17](./samtools/1.17/)
- [1.17-2023-06](./samtools/1.17-2023-06/)
- [1.18](./samtools/1.18/)
- [1.19](./samtools/1.19/)
- [1.20](./samtools/1.20/)
- [1.20.c](./samtools/1.20.c/)
| https://github.com/samtools/samtools |
+| [SeqKit](https://hub.docker.com/r/staphb/SeqKit)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqkit)](https://hub.docker.com/r/staphb/seqkit) | - [2.3.1](./seqkit/2.3.1/)
- [2.6.1](./seqkit/2.6.1/)
- [2.7.0](./seqkit/2.7.0/)
- [2.8.0](./seqkit/2.8.0/)
- [2.8.1](./seqkit/2.8.1/)
- [2.8.2](./seqkit/2.8.2/)
| https://github.com/shenwei356/seqkit |
| [SeqSero](https://hub.docker.com/r/staphb/seqsero/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqsero)](https://hub.docker.com/r/staphb/seqsero) | | https://github.com/denglab/SeqSero |
| [SeqSero2](https://hub.docker.com/r/staphb/seqsero2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqsero2)](https://hub.docker.com/r/staphb/seqsero2) | - [0.1.0](./seqsero2/0.1.0/)
- [1.0.0](./seqsero2/1.0.0/)
- [1.0.2](./seqsero2/1.0.2/)
- [1.1.0](./seqsero2/1.1.0/)
- [1.1.1](./seqsero2/1.1.1/)
- [1.2.1](./seqsero2/1.2.1/)
- [1.3.1](./seqsero2/1.3.1/)
| https://github.com/denglab/SeqSero2/ |
| [seqtk](https://hub.docker.com/r/staphb/seqtk)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqtk)](https://hub.docker.com/r/staphb/seqtk) | - [1.3](seqtk/1.3/)
- [1.4](seqtk/1.4/)
| https://github.com/lh3/seqtk |
| [seqyclean](https://hub.docker.com/r/staphb/seqyclean)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seqyclean)](https://hub.docker.com/r/staphb/seqyclean) | | https://github.com/ibest/seqyclean |
| [Seroba](https://hub.docker.com/r/staphb/seroba)
[![docker pulls](https://badgen.net/docker/pulls/staphb/seroba)](https://hub.docker.com/r/staphb/seroba) | | https://github.com/sanger-pathogens/seroba |
| [SerotypeFinder](https://hub.docker.com/r/staphb/serotypefinder/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/serotypefinder)](https://hub.docker.com/r/staphb/serotypefinder) | - [1.1 (perl version)](./serotypefinder/1.1/)
- [2.0.1 (python version)](./serotypefinder/2.0.1/)
- [2.0.2](./serotypefinder/2.0.2/)
| https://bitbucket.org/genomicepidemiology/serotypefinder/ |
+| [ShigaPass](https://hub.docker.com/r/staphb/shigapass/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/shigapass)](https://hub.docker.com/r/staphb/shigapass) | - [1.5.0](shigapass/1.5.0/)
| https://github.com/imanyass/ShigaPass |
| [shigatyper](https://hub.docker.com/r/staphb/shigatyper/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/shigatyper)](https://hub.docker.com/r/staphb/shigatyper) | - 2.0.1
- 2.0.2
- 2.0.3
- [2.0.4](shigatyper/2.0.4/)
- [2.0.5](shigatyper/2.0.5/)
| https://github.com/CFSAN-Biostatistics/shigatyper |
| [ShigEiFinder](https://hub.docker.com/r/staphb/shigeifinder/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/shigeifinder)](https://hub.docker.com/r/staphb/shigeifinder) | - [1.3.2](shigeifinder/1.3.2/)
- [1.3.3](shigeifinder/1.3.3/)
- [1.3.5](shigeifinder/1.3.5/)
| https://github.com/LanLab/ShigEiFinder |
| [Shovill](https://hub.docker.com/r/staphb/shovill/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/shovill)](https://hub.docker.com/r/staphb/shovill) | | https://github.com/tseemann/shovill |
@@ -268,7 +278,7 @@ To learn more about the docker pull rate limits and the open source software pro
| [SISTR](https://hub.docker.com/r/staphb/sistr/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/sistr)](https://hub.docker.com/r/staphb/sistr) | | https://github.com/phac-nml/sistr_cmd |
| [SKA](https://hub.docker.com/r/staphb/ska/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ska)](https://hub.docker.com/r/staphb/ska) | | https://github.com/simonrharris/SKA |
| [SKA2](https://hub.docker.com/r/staphb/ska2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/ska2)](https://hub.docker.com/r/staphb/ska2) | - [0.3.6](./ska2/0.3.6/)
- [0.3.7](./ska2/0.3.7/)
| https://github.com/bacpop/ska.rust |
-| [skani](https://github.com/bluenote-1577/skani)
[![docker pulls](https://badgen.net/docker/pulls/staphb/skani)](https://hub.docker.com/r/staphb/skani) | - [0.2.0](./skani/0.2.0)
- [0.2.1](./skani/0.2.1)
| https://github.com/bluenote-1577/skani |
+| [skani](https://github.com/bluenote-1577/skani)
[![docker pulls](https://badgen.net/docker/pulls/staphb/skani)](https://hub.docker.com/r/staphb/skani) | - [0.2.0](./skani/0.2.0)
- [0.2.1](./skani/0.2.1)
- [0.2.2](./skani/0.2.2)
| https://github.com/bluenote-1577/skani |
| [SKESA](https://hub.docker.com/r/staphb/skesa)
[![docker pulls](https://badgen.net/docker/pulls/staphb/skesa)](https://hub.docker.com/r/staphb/skesa) | - [2.3.0](./skesa/2.3.0/)
- [2.4.0 (`gfa_connector` & `kmercounter` included)](./skesa/2.4.0/)
- [skesa.2.4.0_saute.1.3.0_2 (also known as 2.5.1)](./skesa/skesa.2.4.0_saute.1.3.0_2/)
| https://github.com/ncbi/SKESA |
| [Smalt](https://hub.docker.com/r/staphb/smalt)
[![docker pulls](https://badgen.net/docker/pulls/staphb/smalt)](https://hub.docker.com/r/staphb/smalt) | | https://www.sanger.ac.uk/tool/smalt-0/ |
| [snpeff](https://hub.docker.com/r/staphb/snpeff)
[![docker pulls](https://badgen.net/docker/pulls/staphb/snpeff)](https://hub.docker.com/r/staphb/snpeff) | | https://pcingola.github.io/SnpEff |
@@ -280,6 +290,7 @@ To learn more about the docker pull rate limits and the open source software pro
| [SRA-toolkit](https://hub.docker.com/r/staphb/sratoolkit/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/sratoolkit)](https://hub.docker.com/r/staphb/sratoolkit) | - 2.9.2
- [3.0.7](./sratoolkit/3.0.7/)
| https://github.com/ncbi/sra-tools |
| [SRST2](https://hub.docker.com/r/staphb/srst2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/srst2)](https://hub.docker.com/r/staphb/srst2) | - 0.2.0
- [0.2.0 + custom Vibrio cholerae database](srst2/0.2.0-vibrio-230224/README.md)
| https://github.com/katholt/srst2 |
| [Staramr](https://hub.docker.com/r/staphb/staramr/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/staramr)](https://hub.docker.com/r/staphb/staramr) | - [0.5.1](./staramr/0.5.1/)
- [0.7.1](./staramr/0.7.1/)
- [0.8.0](./staramr/0.8.0/)
- [0.10.0](./staramr/0.10.0/)
| https://github.com/phac-nml/staramr |
+| [stxtyper](https://hub.docker.com/r/staphb/stxtyper)
[![docker pulls](https://badgen.net/docker/pulls/staphb/stxtyper)](https://hub.docker.com/r/staphb/stxtyper) | - [1.0.24](stxtyper/1.0.24)
| https://github.com/ncbi/stxtyper |
| [sylph](https://hub.docker.com/r/staphb/sylph)
[![docker pulls](https://badgen.net/docker/pulls/staphb/sylph)](https://hub.docker.com/r/staphb/sylph) | - [0.4.1](./sylph/0.4.1)
- [0.5.1](./sylph/0.5.1)
- [0.6.0](./sylph/0.6.0)
- [0.6.1](./sylph/0.6.1)
| https://github.com/bluenote-1577/sylph |
| [TBProfiler](https://hub.docker.com/r/staphb/tbprofiler/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/tbprofiler)](https://hub.docker.com/r/staphb/tbprofiler) | - [4.3.0](./tbprofiler/4.3.0/)
- [4.4.0](./tbprofiler/4.4.0/)
- [4.4.2](./tbprofiler/4.4.2/)
- [5.0.1](tbprofiler/5.0.1/)
- [6.2.0](tbprofiler/6.2.0/)
- [6.2.1](tbprofiler/6.2.1/)
| https://github.com/jodyphelan/TBProfiler |
| [TipToft](https://hub.docker.com/r/staphb/tiptoft/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/tiptoft)](https://hub.docker.com/r/staphb/tiptoft) | | https://github.com/andrewjpage/tiptoft |
@@ -289,9 +300,10 @@ To learn more about the docker pull rate limits and the open source software pro
| [Trycycler](https://hub.docker.com/r/staphb/trycycler/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/trycycler)](https://hub.docker.com/r/staphb/trycycler) | - [0.3.1](./trycycler/0.3.1/)
- [0.3.2](./trycycler/0.3.2/)
- [0.3.3](./trycycler/0.3.3/)
- [0.5.0](./trycycler/0.5.0/)
- [0.5.3](./trycycler/0.5.3/)
- [0.5.4](./trycycler/0.5.4/)
- [0.5.5](./trycycler/0.5.5/)
| https://github.com/rrwick/Trycycler |
| [Unicycler](https://hub.docker.com/r/staphb/unicycler/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/unicycler)](https://hub.docker.com/r/staphb/unicycler) | | https://github.com/rrwick/Unicycler |
| [VADR](https://hub.docker.com/r/staphb/vadr/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/vadr)](https://hub.docker.com/r/staphb/vadr) | - 1.1
- 1.1.2
- 1.1.3
- 1.2
- 1.2.1
- 1.3 & SARS-CoV-2 models 1.3-1
- 1.3 & SARS-CoV-2 models 1.3-2
- 1.4 & SARS-CoV-2 models 1.3-2
- 1.4.1 & SARS-CoV-2 models 1.3-2
- [1.4.2 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1](vadr/1.4.2/)
- [1.5 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1](vadr/1.5/)
- [1.5.1 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1, RSV models 1.5-2](vadr/1.5.1/)
- [1.6.3 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1, RSV models 1.5-2, Influenza v1.6.3-1](vadr/1.6.3/)
- [1.6.3 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1, RSV models 1.5-2, Influenza v1.6.3-1, HAV v1.0.0](vadr/1.6.3-hav/)
- [1.6.3 & SARS-CoV-2 models 1.3-2, MPXV models 1.4.2-1, RSV models 1.5-2, Influenza v1.6.3-2, HAV v1.0.0](vadr/1.6.3-hav-flu2/)
| https://github.com/nawrockie/vadr (archived, now redirects to ncbi/vadr) https://github.com/ncbi/vadr |
-| [Verkko](https://hub.docker.com/r/staphb/verkko/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/verkko)](https://hub.docker.com/r/staphb/verkko) | - [2.0](./verkko/2.0/)
- [2.1](./verkko/2.1/)
| https://github.com/marbl/verkko |
+| [Verkko](https://hub.docker.com/r/staphb/verkko/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/verkko)](https://hub.docker.com/r/staphb/verkko) | - [2.0](./verkko/2.0/)
- [2.1](./verkko/2.1/)
- [2.2](./verkko/2.2/)
| https://github.com/marbl/verkko |
| [VIBRANT](https://hub.docker.com/r/staphb/vibrant/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/vibrant)](https://hub.docker.com/r/staphb/vibrant) | | https://github.com/AnantharamanLab/VIBRANT |
-| [VIGOR4](https://hub.docker.com/r/staphb/vigor4/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/vigor4)](https://hub.docker.com/r/staphb/vigor4) | | https://github.com/JCVenterInstitute/VIGOR4 |
+| [VIGOR4](https://hub.docker.com/r/staphb/vigor4/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/vigor4)](https://hub.docker.com/r/staphb/vigor4) | - [4.1.20190131](./vigor4/4.1.20190131/)
- [4.1.20200702](./vigor4/4.1.20200702/)
| https://github.com/JCVenterInstitute/VIGOR4 |
+| [Viridian](https://hub.docker.com/r/staphb/viridian/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/viridian)](https://hub.docker.com/r/staphb/viridian) | - [1.2.2](./viridian/1.2.2/)
| https://github.com/iqbal-lab-org/viridian |
| [VirSorter2](https://hub.docker.com/r/staphb/virsorter2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/virsorter2)](https://hub.docker.com/r/staphb/virsorter2/) | | https://github.com/jiarong/VirSorter2 |
| [VirulenceFinder](https://hub.docker.com/r/staphb/virulencefinder/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/virulencefinder)](https://hub.docker.com/r/staphb/virulencefinder/) | - [2.0.4](virulencefinder/2.0.4/)
| https://bitbucket.org/genomicepidemiology/virulencefinder/src/master/
https://bitbucket.org/genomicepidemiology/virulencefinder_db/src/master/ |
| [wtdbg2](https://hub.docker.com/r/staphb/wtdbg2/)
[![docker pulls](https://badgen.net/docker/pulls/staphb/wtdbg2)](https://hub.docker.com/r/staphb/wtdbg2) | | https://github.com/ruanjue/wtdbg2 |
@@ -362,3 +374,5 @@ Each Dockerfile lists the author(s)/maintainer(s) as a metadata `LABEL`, but the
* [@evagunawan](https://github.com/evagunawan)
* [@nawrockie](https://github.com/nawrockie)
* [@stephenturner](https://github.com/stephenturner)
+ * [@soejun](https://github.com/soejun)
+ * [@taylorpaisie](https://github.com/taylorpaisie)
diff --git a/aniclustermap/1.4.0/Dockerfile b/aniclustermap/1.4.0/Dockerfile
new file mode 100644
index 000000000..f8b27c271
--- /dev/null
+++ b/aniclustermap/1.4.0/Dockerfile
@@ -0,0 +1,61 @@
+ARG ANICLUSTERMAP_VER="1.4.0"
+ARG FASTANI_VER="1.34"
+ARG SKANI_VER="0.2.2"
+
+FROM staphb/fastani:${FASTANI_VER} as fastani
+FROM staphb/skani:${SKANI_VER} as skani
+
+## App ##
+FROM ubuntu:jammy as app
+
+ARG ANICLUSTERMAP_VER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="ANIclustermap"
+LABEL software.version="${ANICLUSTERMAP_VER}"
+LABEL description="ANIclustermap draws ANI(Average Nucleotide Identity) clustermap between all-vs-all microbial genomes."
+LABEL website="https://github.com/moshi4/ANIclustermap"
+LABEL license="https://github.com/moshi4/ANIclustermap/blob/main/LICENSE"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+# install aniclustermap and its dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ python3-pip \
+ libgomp1 &&\
+ apt-get autoclean && rm -rf /var/lib/apt/lists/* &&\
+ pip install -vv --no-cache-dir aniclustermap==${ANICLUSTERMAP_VER}
+
+# import fastani & skani
+COPY --from=fastani /usr/local/bin/fastANI /usr/local/bin/
+COPY --from=skani /usr/local/bin/skani /usr/local/bin/
+
+# default run command
+CMD ANIclustermap -h
+
+# singularity compatibility
+ENV LC_ALL=C
+
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+ARG ANICLUSTERMAP_VER
+
+RUN apt-get update && apt-get install -y wget
+
+# download test data from repo
+RUN wget https://github.com/moshi4/ANIclustermap/archive/refs/tags/v${ANICLUSTERMAP_VER}.tar.gz &&\
+ tar -xvf v${ANICLUSTERMAP_VER}.tar.gz
+# run default fastani mode
+RUN ANIclustermap -i ANIclustermap-${ANICLUSTERMAP_VER}/example/input/minimal_dataset/ -o fastani
+# run skani mode
+RUN ANIclustermap -m skani -i ANIclustermap-${ANICLUSTERMAP_VER}/example/input/minimal_dataset/ -o skani
+# check results
+RUN ls fastani &&\
+ cat fastani/ANIclustermap_matrix.tsv &&\
+ ls skani &&\
+ cat skani/ANIclustermap_matrix.tsv
+
diff --git a/aniclustermap/1.4.0/README.md b/aniclustermap/1.4.0/README.md
new file mode 100644
index 000000000..7e33c2c97
--- /dev/null
+++ b/aniclustermap/1.4.0/README.md
@@ -0,0 +1,24 @@
+# ANIclustermap container
+
+Main tool: [ANIclustermap](https://github.com/moshi4/ANIclustermap)
+
+Code repository: https://github.com/moshi4/ANIclustermap
+
+Additional tools:
+- fastANI: 1.34
+- skani: 0.2.2
+
+Basic information on how to use this tool:
+- executable: ANIclustermap
+- help: -h, --help
+- version: -v, --version
+- description: ANIclustermap is an easy-to-use tool for drawing ANI(Average Nucleotide Identity) clustermap between all-vs-all microbial genomes.
+
+
+Full documentation: https://github.com/moshi4/ANIclustermap
+
+## Example Usage
+
+```bash
+ANIclustermap -i input_directory -o output_directory
+```
diff --git a/augur/24.4.0/Dockerfile b/augur/24.4.0/Dockerfile
new file mode 100644
index 000000000..639ff75e2
--- /dev/null
+++ b/augur/24.4.0/Dockerfile
@@ -0,0 +1,63 @@
+FROM python:3.11-slim as app
+
+ARG AUGUR_VER="24.4.0"
+
+# LABEL instructions tag the image with metadata that might be important to the user
+# Optional, but highly recommended
+LABEL base.image="python:3.11-slim"
+LABEL dockerfile.version="1"
+LABEL software="augur"
+LABEL software.version=${AUGUR_VER}
+LABEL description="Augur is the bioinformatics toolkit we use to track evolution from sequence and serological data. The output of augur is a series of JSONs that can be used to visualize your results using Auspice."
+LABEL website="https://github.com/nextstrain/augur"
+LABEL license="https://github.com/nextstrain/augur/blob/master/LICENSE.txt"
+LABEL maintainer="John Arnn"
+LABEL maintainer.email="jarnn@utah.gov"
+
+# 'RUN' executes code during the build
+# Install dependencies via apt-get or yum if using a centos or fedora base
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ procps \
+ ca-certificates \
+ wget \
+ mafft \
+ iqtree \
+ raxml \
+ fasttree \
+ vcftools && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN wget -q https://github.com/nextstrain/augur/archive/refs/tags/${AUGUR_VER}.tar.gz && \
+ tar -xzvf ${AUGUR_VER}.tar.gz && \
+ rm ${AUGUR_VER}.tar.gz && \
+ cd augur-${AUGUR_VER} && \
+ python3 -m pip install '.[full]'
+
+CMD augur --help
+
+WORKDIR /data
+
+FROM app as test
+
+RUN augur --help
+
+WORKDIR /test
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+
+RUN git clone https://github.com/nextstrain/zika-tutorial && \
+ cd zika-tutorial && \
+ mkdir results && \
+ augur index --sequences data/sequences.fasta --output results/sequence_index.tsv && \
+ augur filter --sequences data/sequences.fasta \
+ --sequence-index results/sequence_index.tsv \
+ --metadata data/metadata.tsv \
+ --exclude config/dropped_strains.txt \
+ --output results/filtered.fasta \
+ --sequences-per-group 20 \
+ --min-date 2012 && \
+ augur align \
+ --sequences results/filtered.fasta \
+ --reference-sequence config/zika_outgroup.gb \
+ --output results/aligned.fasta \
+ --fill-gaps
diff --git a/augur/24.4.0/README.md b/augur/24.4.0/README.md
new file mode 100644
index 000000000..f8c0ee80c
--- /dev/null
+++ b/augur/24.4.0/README.md
@@ -0,0 +1,30 @@
+# Augur Container
+Main tool: [Augur](https://github.com/nextstrain/augur)
+
+Definition: One held to foretell events by omens.
+
+Augur is the bioinformatics toolkit we use to track evolution from sequence and serological data. It provides a collection of commands which are designed to be composable into larger processing pipelines.
+
+The output of augur is a series of JSONs that can be used to visualize your results using Auspice.
+
+Note: Auspice is a different tool.
+
+# Example Usage
+
+```
+augur index --sequences sequences.fasta --output sequence_index.tsv
+```
+
+```
+augur filter \
+ --sequences data/sequences.fasta \
+ --sequence-index results/sequence_index.tsv \
+ --metadata data/metadata.tsv \
+ --exclude config/dropped_strains.txt \
+ --output results/filtered.fasta \
+ --group-by country year month \
+ --sequences-per-group 20 \
+ --min-date 2012
+```
+
+Better documentation can be found [here.](https://docs.nextstrain.org/en/latest/tutorials/creating-a-workflow.html)
diff --git a/bakta/1.9.4/Dockerfile b/bakta/1.9.4/Dockerfile
new file mode 100644
index 000000000..982da23a0
--- /dev/null
+++ b/bakta/1.9.4/Dockerfile
@@ -0,0 +1,50 @@
+FROM mambaorg/micromamba:1.5.8 as app
+
+ARG BAKTA_VER="1.9.4"
+ARG DIAMOND_VER="2.1.8"
+
+# 'LABEL' instructions tag the image with metadata that might be important to the user
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="Bakta"
+LABEL software.version="${BAKTA_VER}"
+LABEL description="rapid & standardized annotation of bacterial genomes, MAGs & plasmids"
+LABEL website="https://github.com/oschwengers/bakta"
+LABEL license="https://github.com/oschwengers/bakta/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+USER root
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ procps \
+ ca-certificates && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN micromamba install --name base -c conda-forge -c bioconda -c defaults bakta=${BAKTA_VER} diamond=${DIAMOND_VER} && \
+ micromamba clean -a -f -y && \
+ mkdir /data
+
+ENV PATH="/opt/conda/bin/:$PATH" \
+ LC_ALL=C
+
+CMD bakta --help
+
+WORKDIR /data
+
+FROM app as test
+
+WORKDIR /test
+
+RUN bakta --help && \
+ bakta_db --help && \
+ bakta --version
+
+RUN bakta_db list && \
+ bakta_db download --type light
+
+RUN wget -q https://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/000/195/815/GCF_000195815.1_ASM19581v1/GCF_000195815.1_ASM19581v1_genomic.fna.gz && \
+ gunzip GCF_000195815.1_ASM19581v1_genomic.fna.gz && \
+ bakta --db /test/db-light GCF_000195815.1_ASM19581v1_genomic.fna && \
+ head GCF_000195815.1_ASM19581v1_genomic.gbff
diff --git a/bakta/1.9.4/README.md b/bakta/1.9.4/README.md
new file mode 100644
index 000000000..255820db5
--- /dev/null
+++ b/bakta/1.9.4/README.md
@@ -0,0 +1,25 @@
+# bakta container
+
+Main tool: [bakta](https://github.com/oschwengers/bakta)
+
+Code repository: https://github.com/oschwengers/bakta
+
+Basic information on how to use this tool:
+- executable: bakta
+- help: -h
+- version: -v
+- description: Annotates bacterial genomes
+
+> Bakta is a tool for the rapid & standardized annotation of bacterial genomes and plasmids from both isolates and MAGs. It provides dbxref-rich, sORF-including and taxon-independent annotations in machine-readable JSON & bioinformatics standard file formats for automated downstream analysis.
+
+Additional information:
+
+WARNING : Image does not contain a database.
+
+Full documentation: https://github.com/oschwengers/bakta/
+
+## Example Usage
+
+```bash
+bakta sample.fasta --db <path-to-db> --threads 20 --prefix sample
+```
diff --git a/bcftools/1.20.c/Dockerfile b/bcftools/1.20.c/Dockerfile
new file mode 100644
index 000000000..7bfbdc5fb
--- /dev/null
+++ b/bcftools/1.20.c/Dockerfile
@@ -0,0 +1,117 @@
+# for easy upgrade later. ARG variables only persist during build time
+ARG BCFTOOLS_VER="1.20"
+
+FROM ubuntu:jammy as builder
+
+# re-instantiate variable
+ARG BCFTOOLS_VER
+
+# install dependencies, cleanup apt garbage
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ wget \
+ ca-certificates \
+ perl \
+ bzip2 \
+ autoconf \
+ automake \
+ make \
+ gcc \
+ zlib1g-dev \
+ libbz2-dev \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ libssl-dev \
+ libperl-dev \
+ libgsl0-dev \
+ libdeflate-dev \
+ procps && \
+ rm -rf /var/lib/apt/lists/* && apt-get autoclean
+
+
+# download, compile, and install bcftools
+RUN wget https://github.com/samtools/bcftools/releases/download/${BCFTOOLS_VER}/bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ tar -xjf bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ rm -v bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ cd bcftools-${BCFTOOLS_VER} && \
+ ./configure --enable-libgsl --enable-perl-filters &&\
+ make && \
+ make install && \
+ make test
+
+### start of app stage ###
+FROM ubuntu:jammy as app
+
+# re-instantiate variable
+ARG BCFTOOLS_VER
+
+# putting the labels in
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="bcftools"
+LABEL software.version="${BCFTOOLS_VER}"
+LABEL description="Variant calling and manipulating files in the Variant Call Format (VCF) and its binary counterpart BCF"
+LABEL website="https://github.com/samtools/bcftools"
+LABEL license="https://github.com/samtools/bcftools/blob/develop/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+LABEL maintainer2="Curtis Kapsak"
+LABEL maintainer2.email="kapsakcj@gmail.com"
+
+# install dependencies required for running bcftools
+# https://github.com/samtools/bcftools/blob/develop/INSTALL#L29
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ perl\
+ zlib1g \
+ gsl-bin \
+ bzip2 \
+ liblzma5 \
+ libcurl3-gnutls \
+ libdeflate0 \
+ procps \
+ && apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# copy in bcftools executables from builder stage
+COPY --from=builder /usr/local/bin/* /usr/local/bin/
+# copy in bcftools plugins from builder stage
+COPY --from=builder /usr/local/libexec/bcftools/* /usr/local/libexec/bcftools/
+
+# set locale settings for singularity compatibility
+ENV LC_ALL=C
+
+# set final working directory
+WORKDIR /data
+
+# default command is to pull up help options
+CMD ["bcftools", "--help"]
+
+### start of test stage ###
+FROM app as test
+
+# running --help and listing plugins
+RUN bcftools --help && bcftools plugin -lv
+
+# install wget for downloading test files
+RUN apt-get update && apt-get install -y wget vcftools
+
+RUN echo "downloading test SC2 BAM and FASTA and running bcftools mpileup and bcftools call test commands..." && \
+ wget -q https://raw.githubusercontent.com/artic-network/artic-ncov2019/master/primer_schemes/nCoV-2019/V4/SARS-CoV-2.reference.fasta && \
+ wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.primertrim.sorted.bam && \
+ bcftools mpileup -A -d 200 -B -Q 0 -f SARS-CoV-2.reference.fasta SRR13957123.primertrim.sorted.bam | \
+ bcftools call -mv -Ov -o SRR13957123.vcf
+
+RUN echo "testing plugins..." && \
+ bcftools +counts SRR13957123.vcf
+
+RUN echo "testing polysomy..." && \
+ wget https://samtools.github.io/bcftools/howtos/cnv-calling/usage-example.tgz &&\
+ tar -xvf usage-example.tgz &&\
+ zcat test.fcr.gz | ./fcr-to-vcf -b bcftools -a map.tab.gz -o outdir/ &&\
+ bcftools cnv -o cnv/ outdir/test.vcf.gz &&\
+ bcftools polysomy -o psmy/ outdir/test.vcf.gz &&\
+ head psmy/dist.dat
+
+RUN echo "reading test data from Google Cloud to validate GCS support" && \
+ bcftools head -h 20 gs://genomics-public-data/references/hg38/v0/1000G_phase1.snps.high_confidence.hg38.vcf.gz
+
+RUN echo "reading test data from S3 to validate AWS support" && \
+ bcftools head -h 20 s3://human-pangenomics/T2T/CHM13/assemblies/variants/GATK_CHM13v2.0_Resource_Bundle/resources-broad-hg38-v0-1000G_phase1.snps.high_confidence.hg38.t2t-chm13-v2.0.vcf.gz
diff --git a/bcftools/1.20.c/README.md b/bcftools/1.20.c/README.md
new file mode 100644
index 000000000..473a0f659
--- /dev/null
+++ b/bcftools/1.20.c/README.md
@@ -0,0 +1,25 @@
+# bcftools container
+
+Main tool: [bcftools](https://github.com/samtools/bcftools)
+
+Code repository: https://github.com/samtools/bcftools
+
+Basic information on how to use this tool:
+- executable: bcftools
+- help: --help
+- version: --version
+- description: BCFtools is a program for variant calling and manipulating files in the Variant Call Format (VCF) and its binary counterpart BCF.
+
+Additional information:
+
+This container includes bcftools v1.20 compiled with **libdeflate** for better cloud performance. Also, "polysomy" and plugins are enabled in this image.
+
+Full documentation: https://samtools.github.io/bcftools/howtos/index.html
+
+## Example Usage
+
+```bash
+bcftools mpileup -A -d 200 -B -Q 0 -f {reference_genome} {bam} | bcftools call -mv -Ov -o bcftools_variants/{sample}.vcf
+```
+
+
diff --git a/blast/2.16.0/Dockerfile b/blast/2.16.0/Dockerfile
new file mode 100644
index 000000000..4d022999a
--- /dev/null
+++ b/blast/2.16.0/Dockerfile
@@ -0,0 +1,62 @@
+FROM ubuntu:jammy as app
+
+ARG BLAST_VER="2.16.0"
+
+# LABEL instructions tag the image with metadata that might be important to the user
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="blast+"
+LABEL software.version=$BLAST_VER
+LABEL description="Finds matches in sequencing reads"
+LABEL website="https://blast.ncbi.nlm.nih.gov/Blast.cgi?PAGE_TYPE=BlastDocs&DOC_TYPE=Download"
+LABEL license="https://www.ncbi.nlm.nih.gov/IEB/ToolBox/CPP_DOC/lxr/source/scripts/projects/blast/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ libgomp1 && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# Install and/or setup more things. Make /data for use as a working dir
+RUN wget -q https://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/${BLAST_VER}/ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ tar -xzf ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ rm ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz
+
+# ENV instructions set environment variables that persist from the build into the resulting image
+# Use for e.g. $PATH and locale settings for compatibility with Singularity
+ENV PATH="/ncbi-blast-${BLAST_VER}+/bin:$PATH" \
+ LC_ALL=C
+
+# WORKDIR sets working directory
+WORKDIR /data
+
+# yes, there are more tools than blastn, but it's likely the most common one used
+CMD [ "blastn", "-help" ]
+
+# A second FROM instruction creates a new stage
+# We use `test` for the test image
+FROM app as test
+
+# getting all the executables in bin
+RUN ls /ncbi-blast-*/bin/
+
+# making sure PATH is set up
+RUN blastn -help
+
+# getting a genome
+RUN mkdir db && \
+ wget -q https://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/000/005/845/GCF_000005845.2_ASM584v2/GCF_000005845.2_ASM584v2_genomic.fna.gz -P db && \
+ gunzip db/GCF_000005845.2_ASM584v2_genomic.fna.gz && \
+ makeblastdb -dbtype nucl -in db/GCF_000005845.2_ASM584v2_genomic.fna
+
+# getting a list of genes
+RUN wget -q https://raw.githubusercontent.com/rrwick/Unicycler/main/unicycler/gene_data/dnaA.fasta
+
+# getting some blast results
+RUN tblastn -query dnaA.fasta \
+ -db db/GCF_000005845.2_ASM584v2_genomic.fna \
+ -outfmt '6' \
+ -out blast_hits.txt && \
+ head blast_hits.txt
diff --git a/blast/2.16.0/README.md b/blast/2.16.0/README.md
new file mode 100644
index 000000000..79013d9bf
--- /dev/null
+++ b/blast/2.16.0/README.md
@@ -0,0 +1,60 @@
+# blast+ container
+
+Main tools:
+
+- [blast+](https://blast.ncbi.nlm.nih.gov/Blast.cgi?PAGE_TYPE=BlastDocs&DOC_TYPE=Download)
+
+This is meant to assist in local blast searches. No blast databases will be maintained in this container. Be sure to mount your relevant volumes with `--volume` or `-v` when using the command line.
+
+blast+ is actually a suite of tools. blast+ v.2.16.0 includes:
+
+```bash
+$ ls /ncbi-blast-2.16.0+/bin/
+blast_formatter
+blast_formatter_vdb
+blast_vdb_cmd
+blastdb_aliastool
+blastdbcheck
+blastdbcmd
+blastn
+blastn_vdb
+blastp
+blastx
+cleanup-blastdb-volumes.py
+convert2blastmask
+deltablast
+dustmasker
+get_species_taxids.sh
+legacy_blast.pl
+makeblastdb
+makembindex
+makeprofiledb
+psiblast
+rpsblast
+rpstblastn
+segmasker
+tblastn
+tblastn_vdb
+tblastx
+update_blastdb.pl
+windowmasker
+```
+
+Currently not supported, but could be:
+
+```bash
+get_species_taxids.sh # requires E-direct
+update_blastdb.pl # requires perl
+```
+
+## Example Usage
+
+```bash
+# making a blast database
+makeblastdb -dbtype nucl -in fasta.fa
+
+# query
+tblastn -query query.fasta -db fasta.fa -outfmt '6' -out blast_hits.txt
+```
+
+More documentation can be found at [https://www.ncbi.nlm.nih.gov/books/NBK569856/](https://www.ncbi.nlm.nih.gov/books/NBK569856/) and [https://www.ncbi.nlm.nih.gov/books/NBK279690/](https://www.ncbi.nlm.nih.gov/books/NBK279690/)
diff --git a/bowtie2/2.5.4/Dockerfile b/bowtie2/2.5.4/Dockerfile
new file mode 100644
index 000000000..6b14c604e
--- /dev/null
+++ b/bowtie2/2.5.4/Dockerfile
@@ -0,0 +1,56 @@
+# FROM defines the base docker image to start from. This command has to come first in the file
+
+FROM staphb/samtools:1.20 as samtools
+
+FROM ubuntu:jammy as app
+
+ARG BOWTIE2VER=2.5.4
+
+# metadata (there are a few other labels you can add, these are optional but preferred!)
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="Bowtie2"
+LABEL software.version=${BOWTIE2VER}
+LABEL description="Genome assembler using a reference and mapping"
+LABEL website="https://github.com/BenLangmead/bowtie2"
+LABEL documentation="http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml"
+LABEL maintainer="Holly Halstead"
+LABEL maintainer.email="holly.halstead@doh.wa.gov"
+
+# install dependencies, cleanup apt garbage.
+RUN apt-get update && apt-get install -y --no-install-recommends\
+ perl \
+ zlib1g \
+ libncurses5 \
+ bzip2 \
+ liblzma-dev \
+ bedtools \
+ unzip \
+ wget \
+ ca-certificates \
+ python3 && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# copy in samtools executables from builder stage
+COPY --from=samtools /usr/local/bin/* /usr/local/bin/
+
+# download, unpack Bowtie2
+RUN wget -q https://github.com/BenLangmead/bowtie2/releases/download/v${BOWTIE2VER}/bowtie2-${BOWTIE2VER}-linux-x86_64.zip && \
+ unzip bowtie2-${BOWTIE2VER}-linux-x86_64.zip && \
+ rm bowtie2-${BOWTIE2VER}-linux-x86_64.zip
+
+ENV PATH="/bowtie2-${BOWTIE2VER}-linux-x86_64:$PATH"
+
+CMD bowtie2 -h
+
+# set working directory
+WORKDIR /data
+
+FROM app as test
+
+RUN bowtie2 -h
+
+RUN wget -q https://raw.githubusercontent.com/BenLangmead/bowtie2/master/example/reads/longreads.fq && \
+ wget -q https://raw.githubusercontent.com/BenLangmead/bowtie2/master/example/reference/lambda_virus.fa && \
+ bowtie2-build lambda_virus.fa lambda_virus && \
+ bowtie2 -x lambda_virus -U longreads.fq
diff --git a/bowtie2/2.5.4/README.md b/bowtie2/2.5.4/README.md
new file mode 100644
index 000000000..7a0f5c6af
--- /dev/null
+++ b/bowtie2/2.5.4/README.md
@@ -0,0 +1,20 @@
+# bowtie2 container
+Main tool : [bowtie2](http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml)
+
+Bowtie2 : Fast and memory-efficient tool for aligning sequencing reads to reference sequences
+
+Additional tools:
+- Samtools version 1.20
+
+# Example Usage
+
+```
+bowtie2-build lambda_virus.fa /index/lambda_virus
+```
+```
+bowtie2 -x /index/lambda_virus -U longreads.fq
+```
+```
+bowtie2-inspect --summary /index/lambda_virus
+```
+Better documentation can be found at [https://github.com/BenLangmead/bowtie2](https://github.com/BenLangmead/bowtie2)
diff --git a/checkm/1.2.3/Dockerfile b/checkm/1.2.3/Dockerfile
new file mode 100644
index 000000000..e0666388c
--- /dev/null
+++ b/checkm/1.2.3/Dockerfile
@@ -0,0 +1,55 @@
+FROM ubuntu:jammy as app
+
+ARG CHECKM_VER="1.2.3"
+ARG PPLACER_VER="v1.1.alpha19"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="CheckM"
+LABEL software.version="${CHECKM_VER}"
+LABEL description="CheckM provides a set of tools for assessing the quality of genomes recovered from isolates, single cells, or metagenomes."
+LABEL website="https://github.com/Ecogenomics/CheckM"
+LABEL license="https://github.com/Ecogenomics/CheckM/blob/master/LICENSE"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+# install system requirements
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ unzip \
+ python3-pip \
+ hmmer \
+ prodigal && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install checkm and its dependencies
+RUN pip install --no-cache-dir numpy matplotlib pysam checkm-genome==${CHECKM_VER}
+
+# download pplacer
+RUN wget -q https://github.com/matsen/pplacer/releases/download/${PPLACER_VER}/pplacer-linux-${PPLACER_VER}.zip && \
+ unzip pplacer-linux-${PPLACER_VER}.zip && \
+ rm pplacer-linux-${PPLACER_VER}.zip
+
+ENV PATH=$PATH:/pplacer-Linux-${PPLACER_VER} \
+ LC_ALL=C
+
+# 'CMD' instructions set a default command when the container is run.
+CMD [ "checkm", "-h" ]
+
+# 'WORKDIR' sets working directory
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+RUN checkm -h
+
+# download database and inform CheckM of where the files have been placed
+RUN wget -q https://data.ace.uq.edu.au/public/CheckM_databases/checkm_data_2015_01_16.tar.gz && \
+ mkdir checkm_db && \
+ tar -C checkm_db -xvf checkm_data_2015_01_16.tar.gz && \
+ checkm data setRoot checkm_db
+
+# run test with internal test data
+RUN checkm taxonomy_wf species "Escherichia coli" ./checkm_db/test_data/ ./checkm_test_results
+
diff --git a/checkm/1.2.3/README.md b/checkm/1.2.3/README.md
new file mode 100644
index 000000000..1063664f3
--- /dev/null
+++ b/checkm/1.2.3/README.md
@@ -0,0 +1,36 @@
+# CheckM container
+
+Main tool: [CheckM](https://github.com/Ecogenomics/CheckM)
+
+Code repository: https://github.com/Ecogenomics/CheckM
+
+Additional tools:
+- HMMER: 3.3.2+dfsg-1
+- prodigal: 1:2.6.3-5
+- pplacer: 1.1.alpha19-0-g807f6f3
+
+Basic information on how to use this tool:
+- executable: checkm
+- help: <-h>
+- version: <-h>
+- description: CheckM provides a set of tools for assessing the quality of genomes recovered from isolates, single cells, or metagenomes.
+
+Additional information:
+
+This container does not include precalculated data files that CheckM relies on.
+Those files can be downloaded from either:
+- https://data.ace.uq.edu.au/public/CheckM_databases
+- https://zenodo.org/record/7401545#.Y44ymHbMJD8
+
+The reference data must be decompressed into a directory. Inform CheckM of where the files have been placed with the following command:
+```
+checkm data setRoot
+```
+
+Full documentation: https://github.com/Ecogenomics/CheckM/wiki
+
+## Example Usage
+
+```bash
+checkm lineage_wf -t 8 -x fasta input_folder output_folder
+```
diff --git a/clair3/1.0.10/Dockerfile b/clair3/1.0.10/Dockerfile
new file mode 100644
index 000000000..050e7eaef
--- /dev/null
+++ b/clair3/1.0.10/Dockerfile
@@ -0,0 +1,122 @@
+ARG CLAIR3_VER="1.0.10"
+
+FROM mambaorg/micromamba:1.5.8 as builder
+
+ARG CLAIR3_VER
+
+USER root
+
+WORKDIR /
+
+RUN apt-get update && apt-get install -y \
+ wget \
+ bzip2 \
+ make \
+ g++ \
+ libboost-graph-dev
+
+RUN micromamba install --name base -c conda-forge -c bioconda \
+ python=3.9.0 \
+ pypy3.6 \
+ tensorflow-cpu=2.8.0 \
+ pytables \
+ pigz \
+ cffi=1.14.4 \
+ parallel=20191122 \
+ zstd \
+ samtools=1.15.1 \
+ whatshap=1.7 \
+ xz \
+ zlib \
+ bzip2 \
+ automake \
+ curl &&\
+ micromamba clean -a -y
+
+ENV PATH="$PATH:/opt/conda/bin/"
+
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+RUN pypy3 -m ensurepip && \
+ pypy3 -m pip install mpmath==1.2.1 &&\
+ pip install tensorflow-addons
+
+RUN wget -q https://github.com/HKU-BAL/Clair3/archive/refs/tags/v${CLAIR3_VER}.tar.gz &&\
+ tar -xvf v${CLAIR3_VER}.tar.gz &&\
+ cd Clair3-${CLAIR3_VER}/preprocess/realign/ &&\
+ g++ -std=c++14 -O1 -shared -fPIC -o realigner ssw_cpp.cpp ssw.c realigner.cpp && \
+ g++ -std=c++11 -shared -fPIC -o debruijn_graph -O3 debruijn_graph.cpp &&\
+ cd ../.. &&\
+ make &&\
+ mkdir /clair3 &&\
+ cp -rv clair3 preprocess postprocess scripts shared /clair3 &&\
+ cp clair3.py run_clair3.sh /clair3 &&\
+ cp longphase libclair3* /clair3 &&\
+ cp LICENSE.md /clair3
+
+RUN mkdir /clair3/models &&\
+ wget -q http://www.bio8.cs.hku.hk/clair3/clair3_models/clair3_models.tar.gz &&\
+ tar --no-same-owner -C /clair3/models -xvf clair3_models.tar.gz
+
+## App ##
+FROM mambaorg/micromamba:1.5.8 as app
+
+ARG CLAIR3_VER
+
+USER root
+
+WORKDIR /
+
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="CLAIR3"
+LABEL software.version="${CLAIR3_VER}"
+LABEL description="Clair3 is a germline small variant caller for long-reads."
+LABEL website="https://github.com/HKU-BAL/Clair3"
+LABEL license="https://github.com/HKU-BAL/Clair3/blob/main/LICENSE.md"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+RUN apt-get update && apt-get install --no-install-recommends -y\
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+
+RUN micromamba install --name base -c conda-forge -c bioconda \
+ python=3.9.0 \
+ numpy=1.24.3 \
+ pypy3.6 \
+ tensorflow-cpu=2.8.0 \
+ pytables \
+ pigz \
+ cffi=1.14.4 \
+ parallel=20191122 \
+ zstd \
+ samtools=1.15.1 \
+ whatshap=1.7 &&\
+ micromamba clean -a -y &&\
+ rm -rf /opt/conda/pkgs/
+
+ENV PATH="/opt/conda/bin/:/clair3:${PATH}" \
+ LC_ALL=C.UTF-8
+
+RUN pypy3 -m ensurepip &&\
+ pypy3 -m pip install --no-cache mpmath==1.2.1 &&\
+ pip install --no-cache tensorflow-addons
+
+COPY --from=builder /clair3 /clair3
+
+CMD run_clair3.sh
+
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+RUN run_clair3.sh
+
+RUN apt-get update && apt-get install -y wget
+
+RUN wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/clair3/1.0.9/ont_quick_test.sh && \
+ chmod +x ont_quick_test.sh &&\
+ ./ont_quick_test.sh
diff --git a/clair3/1.0.10/README.md b/clair3/1.0.10/README.md
new file mode 100644
index 000000000..1925c747f
--- /dev/null
+++ b/clair3/1.0.10/README.md
@@ -0,0 +1,32 @@
+# Clair3 container
+
+Main tool: [clair3](https://github.com/HKU-BAL/Clair3)
+
+Code repository: https://github.com/HKU-BAL/Clair3
+
+Basic information on how to use this tool:
+- executable: run_clair3.sh
+- help: -h, --help
+- version: -v, --version
+- description: Clair3 is a germline small variant caller for long-reads.
+
+Additional information:
+
+This container includes models in `/clair3/models`
+
+Full documentation: https://github.com/HKU-BAL/Clair3
+
+## Example Usage
+
+```bash
+run_clair3.sh \
+ --bam_fn=${BAM} \
+ --ref_fn=${REF} \
+ --threads=${THREADS} \
+ --platform="ont" \ ## options: {ont,hifi,ilmn}
+ --model_path=${MODEL_PREFIX} \ ## absolute model path prefix
+ --output=${OUTPUT_DIR} ## absolute output path prefix
+## pileup output file: ${OUTPUT_DIR}/pileup.vcf.gz
+## full-alignment output file: ${OUTPUT_DIR}/full_alignment.vcf.gz
+## Clair3 final output file: ${OUTPUT_DIR}/merge_output.vcf.gz
+```
diff --git a/dnaapler/0.8.0/Dockerfile b/dnaapler/0.8.0/Dockerfile
new file mode 100644
index 000000000..4d5add3c1
--- /dev/null
+++ b/dnaapler/0.8.0/Dockerfile
@@ -0,0 +1,57 @@
+FROM mambaorg/micromamba:1.5.8 as app
+
+USER root
+
+WORKDIR /
+
+ARG DNAAPLER_VER="0.8.0"
+
+# metadata labels
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="dnaapler"
+LABEL software.version="${DNAAPLER_VER}"
+LABEL description="Rotates chromosomes and more"
+LABEL website="https://github.com/gbouras13/dnaapler"
+LABEL license="MIT"
+LABEL license.url="https://github.com/gbouras13/dnaapler/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+# install dependencies; cleanup apt garbage
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# create the conda environment, install dnaapler via bioconda package; cleanup conda garbage
+RUN micromamba create -n dnaapler -y -c bioconda -c defaults -c conda-forge dnaapler=${DNAAPLER_VER} && \
+ micromamba clean -a -f -y
+
+# set the PATH and LC_ALL for singularity compatibility
+ENV PATH="/opt/conda/envs/dnaapler/bin/:${PATH}" \
+ LC_ALL=C.UTF-8
+
+# set final working directory as /data
+WORKDIR /data
+
+# default command is to print help options
+CMD [ "dnaapler", "--help" ]
+
+# new base for testing
+FROM app as test
+
+# set working directory to /test
+WORKDIR /test
+
+# checking that tool is in PATH
+RUN dnaapler --help && dnaapler --version
+
+# downloads genome sequence and then extracts the last plasmid in the laziest way possible
+RUN wget -q https://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/025/259/185/GCA_025259185.1_ASM2525918v1/GCA_025259185.1_ASM2525918v1_genomic.fna.gz && \
+ gunzip GCA_025259185.1_ASM2525918v1_genomic.fna.gz && \
+ grep "CP104365.1" GCA_025259185.1_ASM2525918v1_genomic.fna -A 50000 > CP104365.1.fasta && \
+ dnaapler mystery --prefix mystery_test --output mystery_test -i CP104365.1.fasta && \
+ dnaapler plasmid --prefix plasmid_test --output plasmid_test -i CP104365.1.fasta && \
+ ls mystery_test plasmid_test
diff --git a/dnaapler/0.8.0/README.md b/dnaapler/0.8.0/README.md
new file mode 100644
index 000000000..79e17f4a5
--- /dev/null
+++ b/dnaapler/0.8.0/README.md
@@ -0,0 +1,37 @@
+# dnaapler container
+
+Main tool : [dnaapler](https://github.com/gbouras13/dnaapler)
+
+Full documentation: [https://github.com/gbouras13/dnaapler](https://github.com/gbouras13/dnaapler)
+
+> `dnaapler` is a simple python program that takes a single nucleotide input sequence (in FASTA format), finds the desired start gene using blastx against an amino acid sequence database, checks that the start codon of this gene is found, and if so, then reorients the chromosome to begin with this gene on the forward strand.
+
+dnaapler has several commands for chromosomes, plasmids, and more.
+
+```
+Usage: dnaapler [OPTIONS] COMMAND [ARGS]...
+
+Options:
+ -h, --help Show this message and exit.
+ -V, --version Show the version and exit.
+
+Commands:
+ chromosome Reorients your sequence to begin with the dnaA chromosomal...
+ citation Print the citation(s) for this tool
+ custom Reorients your sequence with a custom database
+ mystery Reorients your sequence with a random gene
+ phage Reorients your sequence to begin with the terL large...
+ plasmid Reorients your sequence to begin with the repA replication...
+```
+
+WARNING: Does not support multifasta files. Each sequence must be processed individually.
+
+## Example Usage
+
+```bash
+# for a fasta of a chromosome sequence
+dnaapler chromosome --input chromosome.fasta --output dnaapler_chr
+
+# for a fasta of a plasmid sequence
+dnaapler plasmid --input plasmid.fasta --output dnaapler_plasmid
+```
diff --git a/elgato/1.18.2/Dockerfile b/elgato/1.18.2/Dockerfile
new file mode 100644
index 000000000..c7f947d7d
--- /dev/null
+++ b/elgato/1.18.2/Dockerfile
@@ -0,0 +1,55 @@
+FROM staphb/ispcr:33 as app
+
+ARG ELGATO_VER="1.18.2"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="el_gato"
+LABEL software.version="${ELGATO_VER}"
+LABEL description="Epidemiology of Legionella : Genome-bAsed Typing"
+LABEL website="https://github.com/appliedbinf/el_gato"
+LABEL license="https://github.com/appliedbinf/el_gato/blob/main/LICENSE"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+WORKDIR /
+
+# dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ wget \
+ python3-pip \
+ minimap2 \
+ samtools \
+ ncbi-blast+ && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install el_gato
+RUN wget --no-check-certificate https://github.com/appliedbinf/el_gato/archive/refs/tags/${ELGATO_VER}.tar.gz &&\
+ tar -xvf ${ELGATO_VER}.tar.gz && rm ${ELGATO_VER}.tar.gz &&\
+ cd el_gato-${ELGATO_VER} &&\
+ python3 -m pip install . -vv --no-cache-dir &&\
+ mv ./el_gato/db/ /usr/local/bin/db/
+
+# install fpdf2 for pdf reports
+RUN pip install --no-cache-dir fpdf2 packaging
+
+ENV LC_ALL=C
+
+CMD el_gato.py -h
+
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+RUN apt-get update && apt-get install -y unzip
+
+# download Legionella pneumophila ST62 genome
+RUN wget -P /usr/local/bin/ https://ftp.ncbi.nlm.nih.gov/pub/datasets/command-line/v2/linux-amd64/datasets &&\
+ chmod 755 /usr/local/bin/datasets &&\
+ datasets download genome accession GCF_900119765.1 --include genome &&\
+ unzip -j ncbi_dataset.zip ncbi_dataset/data/GCF_900119765.1/GCF_900119765.1_2532STDY5467631_genomic.fna -d .
+
+# test el_gato
+RUN el_gato.py --assembly GCF_900119765.1_2532STDY5467631_genomic.fna --out test/ &&\
+ cat test/run.log
diff --git a/elgato/1.18.2/README.md b/elgato/1.18.2/README.md
new file mode 100644
index 000000000..8a83429f7
--- /dev/null
+++ b/elgato/1.18.2/README.md
@@ -0,0 +1,33 @@
+# el_gato container
+
+Main tool: [el_gato](https://github.com/appliedbinf/el_gato)
+
+Code repository: https://github.com/appliedbinf/el_gato
+
+Additional tools:
+- minimap2: 2.24-r1122
+- samtools: 1.13
+- ncbi-blast+: 2.12.0+
+- isPCR: v33x2
+
+Basic information on how to use this tool:
+- executable: el_gato.py
+- help: -h
+- version: -v
+- description: Epidemiology of Legionella : Genome-bAsed Typing
+
+Additional information:
+
+Container contains necessary database of Legionella sequence types
+
+Full documentation: https://github.com/appliedbinf/el_gato
+
+## Example Usage
+
+```bash
+# Paired-end:
+el_gato.py --read1 read1.fastq.gz --read2 read2.fastq.gz --out output_folder/
+
+# Assembly:
+el_gato.py --assembly assembly_file.fna --out output_folder/
+```
\ No newline at end of file
diff --git a/elgato/1.19.0/Dockerfile b/elgato/1.19.0/Dockerfile
new file mode 100644
index 000000000..7fd552df9
--- /dev/null
+++ b/elgato/1.19.0/Dockerfile
@@ -0,0 +1,55 @@
+FROM staphb/ispcr:33 as app
+
+ARG ELGATO_VER="1.19.0"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="el_gato"
+LABEL software.version="${ELGATO_VER}"
+LABEL description="Epidemiology of Legionella : Genome-bAsed Typing"
+LABEL website="https://github.com/appliedbinf/el_gato"
+LABEL license="https://github.com/appliedbinf/el_gato/blob/main/LICENSE"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+WORKDIR /
+
+# dependencies
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ wget \
+ python3-pip \
+ minimap2 \
+ samtools \
+ ncbi-blast+ && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install el_gato
+RUN wget --no-check-certificate https://github.com/appliedbinf/el_gato/archive/refs/tags/${ELGATO_VER}.tar.gz &&\
+ tar -xvf ${ELGATO_VER}.tar.gz && rm ${ELGATO_VER}.tar.gz &&\
+ cd el_gato-${ELGATO_VER} &&\
+ python3 -m pip install . -vv --no-cache-dir &&\
+ mv ./el_gato/db/ /usr/local/bin/db/
+
+# install fpdf2 for pdf reports
+RUN pip install --no-cache-dir fpdf2 packaging
+
+ENV LC_ALL=C
+
+CMD el_gato.py -h
+
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+RUN apt-get update && apt-get install -y unzip
+
+# download Legionella pneumophila ST62 genome
+RUN wget -P /usr/local/bin/ https://ftp.ncbi.nlm.nih.gov/pub/datasets/command-line/v2/linux-amd64/datasets &&\
+ chmod 755 /usr/local/bin/datasets &&\
+ datasets download genome accession GCF_900119765.1 --include genome &&\
+ unzip -j ncbi_dataset.zip ncbi_dataset/data/GCF_900119765.1/GCF_900119765.1_2532STDY5467631_genomic.fna -d .
+
+# test el_gato
+RUN el_gato.py --assembly GCF_900119765.1_2532STDY5467631_genomic.fna --out test/ &&\
+ cat test/run.log
diff --git a/elgato/1.19.0/README.md b/elgato/1.19.0/README.md
new file mode 100644
index 000000000..8a83429f7
--- /dev/null
+++ b/elgato/1.19.0/README.md
@@ -0,0 +1,33 @@
+# el_gato container
+
+Main tool: [el_gato](https://github.com/appliedbinf/el_gato)
+
+Code repository: https://github.com/appliedbinf/el_gato
+
+Additional tools:
+- minimap2: 2.24-r1122
+- samtools: 1.13
+- ncbi-blast+: 2.12.0+
+- isPCR: v33x2
+
+Basic information on how to use this tool:
+- executable: el_gato.py
+- help: -h
+- version: -v
+- description: Epidemiology of Legionella : Genome-bAsed Typing
+
+Additional information:
+
+Container contains necessary database of Legionella sequence types
+
+Full documentation: https://github.com/appliedbinf/el_gato
+
+## Example Usage
+
+```bash
+# Paired-end:
+el_gato.py --read1 read1.fastq.gz --read2 read2.fastq.gz --out output_folder/
+
+# Assembly:
+el_gato.py --assembly assembly_file.fna --out output_folder/
+```
\ No newline at end of file
diff --git a/genoflu/1.03/Dockerfile b/genoflu/1.03/Dockerfile
new file mode 100644
index 000000000..074535ea5
--- /dev/null
+++ b/genoflu/1.03/Dockerfile
@@ -0,0 +1,52 @@
+FROM mambaorg/micromamba:1.5.8 as app
+
+USER root
+WORKDIR /
+
+ARG GENOFLU_VER="1.03-0"
+
+# LABEL instructions tag the image with metadata that might be important to the user
+LABEL base.image="micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="genoflu"
+LABEL software.version=$GENOFLU_VER
+LABEL description="Uses BLAST to detect whole-genome flu genotype"
+LABEL website="https://github.com/USDA-VS/GenoFLU"
+LABEL license="https://github.com/USDA-VS/GenoFLU/blob/main/LICENSE"
+LABEL maintainer="Sage Wright"
+LABEL maintainer.email="sagemwright@gmail.com"
+
+# Install dependencies via apt-get; cleanup apt garbage
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install genoflu via bioconda; install into 'base' conda env
+# genoflu is not recognized in micromamba by the version tag
+RUN micromamba install --yes --name base --channel conda-forge --channel bioconda \
+ genoflu=${GENOFLU_VER} && \
+ micromamba clean --all --force-pkgs-dirs --yes
+
+
+# ENV instructions set environment variables that persist from the build into the resulting image
+# hardcode 'base' env bin into PATH, so conda env does not have to be "activated" at run time
+ENV PATH="/opt/conda/bin:${PATH}"
+
+# WORKDIR sets working directory
+WORKDIR /data
+
+# default command is to pull up help options for genoflu
+CMD [ "genoflu.py", "--help" ]
+
+# A second FROM instruction creates a new stage
+# We use `test` for the test image
+FROM app as test
+
+# getting all the executables in bin
+RUN genoflu.py --help && genoflu.py --version
+
+# testing a genome
+RUN wget -q https://raw.githubusercontent.com/USDA-VS/GenoFLU/main/test/test-genome-A1.fasta && \
+ genoflu.py -f test-genome-A1.fasta
\ No newline at end of file
diff --git a/genoflu/README.md b/genoflu/README.md
new file mode 100644
index 000000000..f24fd2c97
--- /dev/null
+++ b/genoflu/README.md
@@ -0,0 +1,24 @@
+# GenoFLU container
+
+Main tool: [GenoFLU](https://github.com/USDA-VS/GenoFLU)
+
+Code repository: https://github.com/USDA-VS/GenoFLU
+
+Additional tools:
+
+Basic information on how to use this tool:
+
+- executable: genoflu.py
+- help: --help
+- version: --version
+- description: "GenoFLU determines the genotype of a segmented flu sample by BLASTing a multi-segment FASTA against the BLAST database"
+
+Additional information:
+
+Full documentation: [https://github.com/USDA-VS/GenoFLU](https://github.com/USDA-VS/GenoFLU)
+
+## Example Usage
+
+```bash
+genoflu.py -f input.fasta
+```
diff --git a/htslib/1.20.c/Dockerfile b/htslib/1.20.c/Dockerfile
new file mode 100644
index 000000000..bc1b1d8ed
--- /dev/null
+++ b/htslib/1.20.c/Dockerfile
@@ -0,0 +1,97 @@
+# for easy upgrade later. ARG variables only persist during build time
+ARG HTSLIB_VER="1.20"
+
+FROM ubuntu:jammy as builder
+
+ARG HTSLIB_VER
+
+# install dependencies, cleanup apt garbage
+# It's helpful when they're all listed on https://github.com/samtools/htslib/blob/develop/INSTALL
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ wget \
+ ca-certificates \
+ make \
+ bzip2 \
+ autoconf \
+ automake \
+ make \
+ gcc \
+ perl \
+ zlib1g-dev \
+ libbz2-dev \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ libssl-dev \
+ libdeflate-dev \
+ procps && \
+ rm -rf /var/lib/apt/lists/* && apt-get autoclean
+
+# get htslib, compile, install, run test suite
+RUN wget -q https://github.com/samtools/htslib/releases/download/${HTSLIB_VER}/htslib-${HTSLIB_VER}.tar.bz2 && \
+ tar -vxjf htslib-${HTSLIB_VER}.tar.bz2 && \
+ rm -v htslib-${HTSLIB_VER}.tar.bz2 && \
+ cd htslib-${HTSLIB_VER} && \
+ ./configure && \
+ make && \
+ make install && \
+ make test
+
+### start of app stage ###
+FROM ubuntu:jammy as app
+
+ARG HTSLIB_VER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="htslib"
+LABEL software.version="${HTSLIB_VER}"
+LABEL description="A C library for reading/writing high-throughput sequencing data"
+LABEL website="https://github.com/samtools/htslib"
+LABEL license="https://github.com/samtools/htslib/blob/develop/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+LABEL maintainer2="Curtis Kapsak"
+LABEL maintainer2.email="kapsakcj@gmail.com"
+
+# install runtime dependencies & cleanup apt garbage
+# installed as recommend here: https://github.com/samtools/htslib/blob/develop/INSTALL#L31
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ bzip2 \
+ zlib1g \
+ libbz2-1.0 \
+ liblzma5 \
+ libcurl3-gnutls \
+ libdeflate0 \
+ ca-certificates \
+ && apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# copy in htslib executables from builder stage
+COPY --from=builder /usr/local/bin/* /usr/local/bin/
+COPY --from=builder /usr/local/lib/ /usr/local/lib/
+COPY --from=builder /usr/local/include/ /usr/local/include/
+
+# set locale settings for singularity compatibility
+ENV LC_ALL=C
+
+# set working directory
+WORKDIR /data
+
+# default command is to show help options
+CMD ["htsfile", "--help"]
+
+### start of test stage ###
+FROM app as test
+
+# check that these three executables are available
+RUN bgzip --help && tabix --help && htsfile --help
+
+RUN apt-get update && apt-get install --no-install-recommends -y wget
+
+# use on actual files
+RUN wget -q https://github.com/StaPH-B/docker-builds/raw/master/tests/SARS-CoV-2/SRR13957123_1.fastq.gz && \
+ gunzip SRR13957123_1.fastq.gz && \
+ bgzip SRR13957123_1.fastq
+
+# FYI Test suite "make test" now performed in the builder stage since app and
+# test stages do not include htslib source code.
+# This is to avoid having to re-download source code simply to run test suite
\ No newline at end of file
diff --git a/htslib/1.20.c/README.md b/htslib/1.20.c/README.md
new file mode 100644
index 000000000..a165c9212
--- /dev/null
+++ b/htslib/1.20.c/README.md
@@ -0,0 +1,35 @@
+# htslib container
+
+Main tool: [htslib](https://www.htslib.org/)
+
+Code repository: https://github.com/samtools/htslib
+
+Additional tools:
+
+* perl 5.34.0
+
+Basic information on how to use this tool:
+- executable: htsfile
+- help: --help
+- version: --version
+- description: The htsfile utility attempts to identify what kind of high-throughput sequencing data files the specified files are, and provides minimal viewing capabilities for some kinds of data file.
+
+Additional information:
+
+This container includes htslib v1.20 compiled with **libdeflate** for better cloud performance.
+
+Full documentation: https://www.htslib.org/doc/samtools.html
+
+## Example Usage
+
+```bash
+# determine file formats for various BAM and SAM files
+$ htsfile tests/SARS-CoV-2/SRR13957123.primertrim.sorted.bam
+tests/SARS-CoV-2/SRR13957123.primertrim.sorted.bam: BAM version 1 compressed sequence data
+
+$ htsfile ce_tag_padded.sam
+ce_tag_padded.sam: SAM version 1.4 sequence text
+
+# compresses sample.fastq to sample.fastq.gz in BGZF format (blocked GNU Zip Format)
+$ bgzip sample.fastq
+```
diff --git a/ivar/1.4.3/Dockerfile b/ivar/1.4.3/Dockerfile
new file mode 100644
index 000000000..5ce2d06b0
--- /dev/null
+++ b/ivar/1.4.3/Dockerfile
@@ -0,0 +1,125 @@
+ARG SAMTOOLSVER=1.20
+ARG HTSLIB_VER=${SAMTOOLSVER}
+ARG IVARVER=1.4.3
+
+FROM ubuntu:jammy as builder
+
+ARG SAMTOOLSVER
+ARG HTSLIB_VER
+ARG IVARVER
+
+# installing htslib
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ build-essential \
+ wget \
+ ca-certificates \
+ make \
+ bzip2 \
+ autoconf \
+ automake \
+ make \
+ gcc \
+ perl \
+ zlib1g-dev \
+ libbz2-dev \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ libssl-dev \
+ libncurses5-dev \
+ procps
+
+# get htslib, compile, install, run test suite
+RUN wget -q https://github.com/samtools/htslib/releases/download/${HTSLIB_VER}/htslib-${HTSLIB_VER}.tar.bz2 && \
+ tar -vxjf htslib-${HTSLIB_VER}.tar.bz2 && \
+ rm -v htslib-${HTSLIB_VER}.tar.bz2 && \
+ cd htslib-${HTSLIB_VER} && \
+ make && \
+ make install && \
+ make test
+
+# download, compile, and install samtools
+RUN wget -q https://github.com/samtools/samtools/releases/download/${SAMTOOLSVER}/samtools-${SAMTOOLSVER}.tar.bz2 && \
+ tar -xjf samtools-${SAMTOOLSVER}.tar.bz2 && \
+ cd samtools-${SAMTOOLSVER} && \
+ ./configure && \
+ make && \
+ make install && \
+ make test
+
+# installing iVar; make /data
+RUN wget -q https://github.com/andersen-lab/ivar/archive/v${IVARVER}.tar.gz && \
+ tar -xf v${IVARVER}.tar.gz && \
+ rm -rf v${IVARVER}.tar.gz && \
+ cd ivar-${IVARVER} && \
+ ./autogen.sh && \
+ ./configure && \
+ make && \
+ make install
+
+FROM ubuntu:jammy as app
+
+ARG IVARVER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="iVar"
+LABEL software.version=${IVARVER}
+LABEL description="Computational package that contains functions broadly useful for viral amplicon-based sequencing."
+LABEL website="https://github.com/andersen-lab/ivar"
+LABEL license="https://github.com/andersen-lab/ivar/blob/master/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+LABEL maintainer2="Curtis Kapsak"
+LABEL maintainer2.email="kapsakcj@gmail.com"
+LABEL modified.from="https://github.com/andersen-lab/ivar/blob/master/Dockerfile"
+LABEL original.maintainer="Kathik G"
+LABEL original.maintainer.email="gkarthik@scripps.edu"
+
+# install dependencies, cleanup apt garbage.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ ca-certificates \
+ procps \
+ libcurl3-gnutls && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+COPY --from=builder /usr/local/bin/ /usr/local/bin/
+COPY --from=builder /usr/local/lib/ /usr/local/lib/
+COPY --from=builder /usr/local/include/ /usr/local/include/
+
+WORKDIR /data
+
+ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib"
+
+CMD ivar help
+
+FROM app as test
+
+WORKDIR /test
+
+RUN ivar help && ivar version && samtools --version
+
+RUN apt-get update && apt-get install -y wget
+
+# getting files
+RUN wget -q https://github.com/StaPH-B/docker-builds/blob/master/tests/SARS-CoV-2/SRR13957123.sorted.bam?raw=true -O SRR13957123.sorted.bam && \
+ samtools view -s 0.25 -b SRR13957123.sorted.bam | samtools sort -o SRR13957123.subsampled.bam - && samtools index SRR13957123.subsampled.bam && \
+ wget -q https://raw.githubusercontent.com/artic-network/artic-ncov2019/master/primer_schemes/nCoV-2019/V3/nCoV-2019.reference.fasta -O MN908947.3.fasta && \
+ wget -q https://raw.githubusercontent.com/artic-network/artic-ncov2019/master/primer_schemes/nCoV-2019/V3/nCoV-2019.primer.bed && \
+ wget -q https://github.com/StaPH-B/docker-builds/blob/83ee344304794f4ffa162d1c082bb35f916badcf/tests/SARS-CoV-2/SRR13957123_1.fastq.gz?raw=true -O SRR13957123_1.fastq.gz && \
+ wget -q https://github.com/StaPH-B/docker-builds/blob/83ee344304794f4ffa162d1c082bb35f916badcf/tests/SARS-CoV-2/SRR13957123_2.fastq.gz?raw=true -O SRR13957123_2.fastq.gz
+
+# primer trimming
+RUN ivar trim -e -i SRR13957123.subsampled.bam -b nCoV-2019.primer.bed -p SRR13957123.primertrim && \
+ samtools sort SRR13957123.primertrim.bam -o SRR13957123.primertrim.sorted.bam
+
+# creating a consensus - using smaller -d for github actions
+RUN samtools mpileup -A -d 40 -B -Q 0 --reference MN908947.3.fasta SRR13957123.primertrim.sorted.bam | \
+ ivar consensus -q 20 -t 0.6 -n N -m 20 -p SRR13957123.consensus && \
+ wc -c SRR13957123.consensus*
+
+# piping into ivar takes too long, but here's what the test would be
+# RUN bwa index MN908947.3.fasta && \
+# bwa mem MN908947.3.fasta SRR13957123_1.fastq.gz SRR13957123_2.fastq.gz | \
+# ivar trim -b nCoV-2019.primer.bed -x 3 -m 30 | \
+# samtools sort | samtools mpileup -aa -A -Q 0 -B -d 200 --reference MN908947.3.fasta - | \
+# ivar consensus -p test_consensus -m 10 -n N -t 0.5
diff --git a/ivar/1.4.3/README.md b/ivar/1.4.3/README.md
new file mode 100644
index 000000000..214ef3a90
--- /dev/null
+++ b/ivar/1.4.3/README.md
@@ -0,0 +1,39 @@
+# iVar container
+
+Main tool : [iVar](https://andersen-lab.github.io/ivar/html/manualpage.html)
+
+> iVar is a computational package that contains functions broadly useful for viral amplicon-based sequencing
+
+Additional tools (required):
+
+* [HTSlib](https://github.com/samtools/htslib) 1.20
+* [samtools](http://www.htslib.org/) 1.20
+
+## Example Usage
+
+```bash
+ivar trim -e -i {bam} -b {primer bed} -p {sample}.primertrim
+```
+
+```bash
+samtools mpileup -A -d 8000 -B -Q 0 --reference {reference.fasta} {bam} | \
+ ivar variants -p {sample}.variants -q 20 -t 0.6 -r {reference.fasta} -g {reference.gff}
+```
+
+```bash
+samtools mpileup -A -d 8000 -B -Q 0 --reference {reference.fasta} {bam} | \
+ ivar consensus -t 0.6 -p {sample}.consensus -n N
+```
+
+Starting with iVar version 1.4.1, the output of an aligner such as minimap2 and bwa (both included) can be piped into ivar trim directly
+```bash
+# index reference
+bwa index reference.fasta
+
+# run bwa and pipe into ivar (single line is split with \ for clarity)
+bwa mem reference.fasta read_1.fastq.gz read_2.fastq.gz | \
+ ivar trim -b primer.bed -x 3 -m 30 | \
+ samtools sort | \
+ samtools mpileup -aa -A -Q 0 -B -d 2000 - | \
+ ivar consensus -p output_prefix -m 10 -n N -t 0.5
+```
diff --git a/metaphlan/4.1.1/Dockerfile b/metaphlan/4.1.1/Dockerfile
new file mode 100644
index 000000000..689e3b785
--- /dev/null
+++ b/metaphlan/4.1.1/Dockerfile
@@ -0,0 +1,44 @@
+FROM mambaorg/micromamba:1.5.8 as app
+
+ARG METAPHLAN_VER="4.1.1"
+
+USER root
+
+WORKDIR /
+
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="MetaPhlAn"
+LABEL software.version="${METAPHLAN_VER}"
+LABEL description="MetaPhlAn is a computational tool for species-level microbial profiling from metagenomic shotgun sequencing data"
+LABEL website="https://github.com/biobakery/MetaPhlAn"
+LABEL license="https://github.com/biobakery/MetaPhlAn/blob/master/license.txt"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ procps &&\
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN micromamba install --name base -c conda-forge -c bioconda metaphlan=${METAPHLAN_VER} &&\
+ micromamba clean -afy
+
+ENV PATH="/opt/conda/bin/:${PATH}" \
+ LC_ALL=C.UTF-8
+
+CMD [ "metaphlan", "--help" ]
+
+WORKDIR /data
+
+## Test ##
+FROM app as test
+
+RUN metaphlan --help
+
+# get test sample
+RUN apt-get update && apt-get install -y wget && wget -q https://github.com/biobakery/MetaPhlAn/releases/download/4.1.0/SRS014476-Supragingival_plaque.fasta.gz
+# download toy db
+RUN metaphlan --install --index mpa_vJan21_TOY_CHOCOPhlAnSGB_202103 --bowtie2db /toy
+# metaphlan test
+RUN metaphlan SRS014476-Supragingival_plaque.fasta.gz --input_type fasta --bowtie2db /toy --index mpa_vJan21_TOY_CHOCOPhlAnSGB_202103 > test.txt &&\
+ cat test.txt
diff --git a/metaphlan/4.1.1/README.md b/metaphlan/4.1.1/README.md
new file mode 100644
index 000000000..4e96e7dd4
--- /dev/null
+++ b/metaphlan/4.1.1/README.md
@@ -0,0 +1,26 @@
+# MetaPhlAn container
+
+Main tool: [MetaPhlAn](https://github.com/biobakery/MetaPhlAn/wiki/MetaPhlAn-4.1)
+
+Code repository: https://github.com/biobakery/MetaPhlAn
+
+Basic information on how to use this tool:
+- executable: metaphlan
+- help: -h
+- version: -v
+- description: MetaPhlAn is a computational tool for species-level microbial profiling (bacteria, archaea, eukaryotes, and viruses) from metagenomic shotgun sequencing data. StrainPhlAn (available within MetaPhlAn) allows strain-level microbial population genomics.
+
+Additional information:
+
+The MetaPhlAn 4 database has been substantially increased (~21GB) in comparison with the previous 3.1 version. Thus, for running MetaPhlAn 4, a minimum of 25GB of memory is needed.
+The database can be downloaded from [Segatalab FTP](http://cmprod1.cibio.unitn.it/biobakery4/metaphlan_databases/) or via metaphlan with the following command:
+
+```metaphlan --install```
+
+Full documentation: https://github.com/biobakery/MetaPhlAn/wiki/MetaPhlAn-4.1
+
+## Example Usage
+
+```bash
+metaphlan metagenome.fastq --input_type fastq -o profiled_metagenome.txt
+```
\ No newline at end of file
diff --git a/multiqc/1.22.3/Dockerfile b/multiqc/1.22.3/Dockerfile
new file mode 100644
index 000000000..3428584c4
--- /dev/null
+++ b/multiqc/1.22.3/Dockerfile
@@ -0,0 +1,48 @@
+FROM ubuntu:jammy as app
+
+ARG MULTIQC_VER="1.22.3"
+
+# metadata
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="MultiQC"
+LABEL software.version="${MULTIQC_VER}"
+LABEL description="Aggregate bioinformatics results across many samples into a single report."
+LABEL website="https://github.com/MultiQC/MultiQC"
+LABEL license="https://github.com/MultiQC/MultiQC/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ python3-pip && \
+ apt-get clean && apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install multiqc
+RUN pip3 install --no-cache-dir "multiqc==${MULTIQC_VER}"
+
+ENV LC_ALL='C.UTF-8' \
+ LANG='C.UTF-8'
+
+CMD multiqc --help
+
+RUN mkdir /data
+WORKDIR /data
+
+# testing layer starts here
+FROM app as test
+
+# getting git
+RUN apt-get update && apt-get install -y git
+
+# to ensure multiqc is in PATH
+RUN multiqc --help
+
+# set working directory so that all test inputs & outputs are kept in /test
+RUN mkdir /test
+WORKDIR /test
+
+# getting multiqc test data
+RUN git clone https://github.com/ewels/MultiQC_TestData && \
+ multiqc . && \
+ ls multiqc_report.html
+
diff --git a/multiqc/1.22.3/README.md b/multiqc/1.22.3/README.md
new file mode 100644
index 000000000..879cb9cd8
--- /dev/null
+++ b/multiqc/1.22.3/README.md
@@ -0,0 +1,13 @@
+# MultiQC container
+
+Main tool : [MultiQC](https://multiqc.info/)
+
+Aggregate results from bioinformatics analyses across many samples into a single report.
+
+Full documentation: [Docs](https://multiqc.info/docs/)
+
+## Example Usage
+
+```
+multiqc -f --cl_config "prokka_fn_snames: True" .
+```
diff --git a/ncbi-amrfinderplus/3.12.8-2024-07-22.1/Dockerfile b/ncbi-amrfinderplus/3.12.8-2024-07-22.1/Dockerfile
new file mode 100755
index 000000000..3a1bde8fe
--- /dev/null
+++ b/ncbi-amrfinderplus/3.12.8-2024-07-22.1/Dockerfile
@@ -0,0 +1,99 @@
+FROM ubuntu:jammy as app
+
+ARG AMRFINDER_VER="3.12.8"
+ARG AMRFINDER_DB_VER="2024-07-22.1"
+ARG BLAST_VER="2.16.0"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="NCBI AMRFinderPlus"
+LABEL software.version="${AMRFINDER_VER}"
+LABEL description="NCBI resistance gene detection tool"
+LABEL website="https://github.com/ncbi/amr"
+LABEL license="https://github.com/ncbi/amr/blob/master/LICENSE"
+LABEL maintainer="Kelsey Florek"
+LABEL maintainer.email="kelsey.florek@slh.wisc.edu"
+LABEL maintainer2="Curtis Kapsak"
+LABEL maintainer2.email="kapsakcj@gmail.com"
+LABEL maintainer3="Anders Goncalves da Silva"
+LABEL maintainer3.email="andersgs@gmail.com"
+LABEL maintainer4="Erin Young"
+LABEL maintainer4.email="eriny@utah.gov"
+LABEL maintainer5="Holly McQueary"
+LABEL maintainer5.email="holly.c.mcqueary@mass.gov"
+
+# ncbi-blast+ installed via apt is v2.12.0 - DISABLING so that we can manually install v2.16.0
+# see here for reason why I'm manually installing a newer blast+ version instead of using apt-get: https://github.com/ncbi/amr/releases/tag/amrfinder_v3.11.8
+
+# hmmer installed via apt is v3.3.2
+# removed because likely unnecessary since we are not compiling from source: make g++
+# libgomp1 required for makeblastdb
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ ca-certificates \
+ wget \
+ curl \
+ libgomp1 \
+ hmmer \
+ procps \
+ gzip && \
+ apt-get autoclean && \
+ rm -rf /var/lib/apt/lists/*
+
+# download and install amrfinderplus pre-compiled binaries; make /data
+RUN mkdir amrfinder && cd /amrfinder && \
+ echo "downloading amrfinderplus v${AMRFINDER_VER} pre-compiled binaries from GitHub..." && \
+ wget -q https://github.com/ncbi/amr/releases/download/amrfinder_v${AMRFINDER_VER}/amrfinder_binaries_v${AMRFINDER_VER}.tar.gz && \
+ tar zxf amrfinder_binaries_v${AMRFINDER_VER}.tar.gz && \
+ rm amrfinder_binaries_v${AMRFINDER_VER}.tar.gz && \
+ mkdir /data
+
+# install ncbi-blast linux binaries
+RUN echo "downloading ncbi-blast-${BLAST_VER}+ linux binaries from NCBI FTP..." && \
+ wget -q https://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/${BLAST_VER}/ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ tar -xzf ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ rm -v ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz
+
+# set PATH and locale settings for singularity compatibility, set amrfinder and manually-installed blast as higher priority in PATH
+ENV PATH="/amrfinder:/ncbi-blast-${BLAST_VER}+/bin:$PATH" \
+ LC_ALL=C
+
+# download databases and index them
+# done in this manner to pin the database version instead of pulling the latest version with `amrfinder -u`
+# softlink is required for `amrfinder -l` and typical `amrfinder` use cases to work properly
+RUN mkdir -p /amrfinder/data/${AMRFINDER_DB_VER} && \
+ wget -q -P /amrfinder/data/${AMRFINDER_DB_VER} ftp://ftp.ncbi.nlm.nih.gov/pathogen/Antimicrobial_resistance/AMRFinderPlus/database/3.12/${AMRFINDER_DB_VER}/* && \
+ amrfinder_index /amrfinder/data/${AMRFINDER_DB_VER} && \
+ ln -s /amrfinder/data/${AMRFINDER_DB_VER} /amrfinder/data/latest
+
+# set final working directory
+WORKDIR /data
+
+# default command is to print help options
+CMD [ "amrfinder", "--help" ]
+
+## Test stage
+FROM app as test
+
+# list database version and available --organism options
+RUN amrfinder -l
+
+# run recommended tests from amrfinder
+RUN amrfinder --threads 1 --plus -p /amrfinder/test_prot.fa -g /amrfinder/test_prot.gff -O Escherichia --print_node > test_prot.got && \
+ diff /amrfinder/test_prot.expected test_prot.got && \
+ amrfinder --threads 1 --plus -n /amrfinder/test_dna.fa -O Escherichia --print_node > test_dna.got && \
+ diff /amrfinder/test_dna.expected test_dna.got && \
+ amrfinder --threads 1 --plus -n /amrfinder/test_dna.fa -p /amrfinder/test_prot.fa -g /amrfinder/test_prot.gff -O Escherichia --print_node > test_both.got && \
+ diff /amrfinder/test_both.expected test_both.got
+
+# run amrfinder on Salmonella, without and with --organism option
+RUN wget https://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/010/941/835/GCA_010941835.1_PDT000052640.3/GCA_010941835.1_PDT000052640.3_genomic.fna.gz && \
+ amrfinder --threads 4 --plus --nucleotide GCA_010941835.1_PDT000052640.3_genomic.fna.gz --output test1.txt && \
+ amrfinder --threads 4 --plus --nucleotide GCA_010941835.1_PDT000052640.3_genomic.fna.gz --organism Salmonella --output test2.txt && \
+ cat test1.txt test2.txt
+
+# run amrfinder on Klebsiella oxytoca using --organism/-O flag
+RUN wget https://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/003/812/925/GCA_003812925.1_ASM381292v1/GCA_003812925.1_ASM381292v1_genomic.fna.gz && \
+ amrfinder --threads 4 --plus --name GCA_003812925.1 -n GCA_003812925.1_ASM381292v1_genomic.fna.gz -O Klebsiella_oxytoca -o GCA_003812925.1-amrfinder.tsv
+
+# test that gunzip is installed
+RUN gunzip --help
diff --git a/ncbi-amrfinderplus/3.12.8-2024-07-22.1/README.md b/ncbi-amrfinderplus/3.12.8-2024-07-22.1/README.md
new file mode 100755
index 000000000..3e51dda5d
--- /dev/null
+++ b/ncbi-amrfinderplus/3.12.8-2024-07-22.1/README.md
@@ -0,0 +1,68 @@
+# NCBI AMRFinderPlus docker image
+
+Main tool : [NCBI AMRFinderPlus](https://github.com/ncbi/amr)
+
+Additional tools:
+
+- hmmer v3.3.2
+- ncbi-blast+ v2.16.0
+
+## Database information
+
+The database included at time of docker image build is **`2024-07-22.1`**. More information can be found in the [changelog.txt on NCBI's FTP](https://ftp.ncbi.nlm.nih.gov/pathogen/Antimicrobial_resistance/AMRFinderPlus/database/3.12/2024-07-22.1/changelog.txt).
+
+Full documentation: [https://github.com/ncbi/amr/wiki](https://github.com/ncbi/amr/wiki)
+
+## Docker Image Tags
+
+Beginning with AMRFinderPlus v3.11.2, we will include the version of AMRFinderPlus followed by the database version in the docker image tag so that it is more informative to users. The format is as follows:
+
+```bash
+# general format
+staphb/ncbi-amrfinderplus:<AMRFinderPlus version>-<database version>
+
+# example
+staphb/ncbi-amrfinderplus:3.11.14-2023-04-17.1
+```
+
+You can view all available docker images on [dockerhub](https://hub.docker.com/r/staphb/ncbi-amrfinderplus/tags) and [quay.io](https://quay.io/repository/staphb/ncbi-amrfinderplus?tab=tags)
+
+## Example Usage
+
+```bash
+# list out the available organisms for the -O/--organism flag
+$ amrfinder -l
+Running: amrfinder -l
+Software directory: '/amrfinder/'
+Software version: 3.12.8
+Database directory: '/amrfinder/data/2024-07-22.1'
+Database version: 2024-07-22.1
+
+Available --organism options: Acinetobacter_baumannii, Burkholderia_cepacia, Burkholderia_pseudomallei, Campylobacter,
+Citrobacter_freundii, Clostridioides_difficile, Enterobacter_asburiae, Enterobacter_cloacae, Enterococcus_faecalis,
+Enterococcus_faecium, Escherichia, Klebsiella_oxytoca, Klebsiella_pneumoniae, Neisseria_gonorrhoeae,
+Neisseria_meningitidis, Pseudomonas_aeruginosa, Salmonella, Serratia_marcescens, Staphylococcus_aureus,
+Staphylococcus_pseudintermedius, Streptococcus_agalactiae, Streptococcus_pneumoniae, Streptococcus_pyogenes,
+Vibrio_cholerae, Vibrio_parahaemolyticus, Vibrio_vulnificus
+
+# download Klebsiella oxytoca genome FASTA/FNA to use as a test
+$ wget "https://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/003/812/925/GCA_003812925.1_ASM381292v1/GCA_003812925.1_ASM381292v1_genomic.fna.gz"
+
+# uncompress the FNA file
+$ gzip -d GCA_003812925.1_ASM381292v1_genomic.fna.gz
+
+# run amrfinder (nucleotide mode) on the uncompressed FNA file
+$ amrfinder --plus --name GCA_003812925.1 -n GCA_003812925.1_ASM381292v1_genomic.fna -O Klebsiella_oxytoca -o GCA_003812925.1-amrfinder.tsv
+
+# view output TSV
+$ column -t -s $'\t' -n GCA_003812925.1-amrfinder.tsv
+Name Protein identifier Contig id Start Stop Strand Gene symbol Sequence name Scope Element type Element subtype Class Subclass Method Target length Reference sequence length % Coverage of reference sequence % Identity to reference sequence Alignment length Accession of closest sequence Name of closest sequence HMM id HMM description
+GCA_003812925.1 NA CP033844.1 369234 370406 + oqxA multidrug efflux RND transporter periplasmic adaptor subunit OqxA core AMR AMR PHENICOL/QUINOLONE PHENICOL/QUINOLONE BLASTX 391 391 100.00 90.79 391 WP_002914189.1 multidrug efflux RND transporter periplasmic adaptor subunit OqxA NA NA
+GCA_003812925.1 NA CP033844.1 370433 373582 + oqxB multidrug efflux RND transporter permease subunit OqxB core AMR AMR PHENICOL/QUINOLONE PHENICOL/QUINOLONE BLASTX 1050 1050 100.00 96.86 1050 WP_023323140.1 multidrug efflux RND transporter permease subunit OqxB15 NA NA
+GCA_003812925.1 NA CP033844.1 636118 637917 - ybtQ yersiniabactin ABC transporter ATP-binding/permease protein YbtQ plus VIRULENCE VIRULENCE NA NA BLASTX 600 600 100.00 89.17 600 AAC69584.1 yersiniabactin ABC transporter ATP-binding/permease protein YbtQ NA NA
+GCA_003812925.1 NA CP033844.1 637913 639706 - ybtP yersiniabactin ABC transporter ATP-binding/permease protein YbtP plus VIRULENCE VIRULENCE NA NA BLASTX 598 600 99.67 89.30 598 CAA21388.1 yersiniabactin ABC transporter ATP-binding/permease protein YbtP NA NA
+GCA_003812925.1 NA CP033844.1 3473617 3474798 + emrD multidrug efflux MFS transporter EmrD plus AMR AMR EFFLUX EFFLUX BLASTX 394 394 100.00 94.16 394 ACN65732.1 multidrug efflux MFS transporter EmrD NA NA
+GCA_003812925.1 NA CP033844.1 5085488 5086357 - blaOXY-2-1 extended-spectrum class A beta-lactamase OXY-2-1 core AMR AMR BETA-LACTAM CEPHALOSPORIN ALLELEX 290 290 100.00 100.00 290 WP_032727905.1 extended-spectrum class A beta-lactamase OXY-2-1 NA NA
+GCA_003812925.1 NA CP033845.1 5102 5632 - ant(2'')-Ia aminoglycoside nucleotidyltransferase ANT(2'')-Ia core AMR AMR AMINOGLYCOSIDE GENTAMICIN/KANAMYCIN/TOBRAMYCIN BLASTX 177 177 100.00 98.31 177 WP_000381803.1 aminoglycoside nucleotidyltransferase ANT(2'')-Ia NA NA
+GCA_003812925.1 NA CP033846.1 748 1932 - tet(39) tetracycline efflux MFS transporter Tet(39) core AMR AMR TETRACYCLINE TETRACYCLINE EXACTX 395 395 100.00 100.00 395 WP_004856455.1 tetracycline efflux MFS transporter Tet(39)
+```
diff --git a/ncbi-datasets/16.22.1/Dockerfile b/ncbi-datasets/16.22.1/Dockerfile
new file mode 100644
index 000000000..5de0780ff
--- /dev/null
+++ b/ncbi-datasets/16.22.1/Dockerfile
@@ -0,0 +1,47 @@
+FROM ubuntu:jammy as app
+
+ARG DATASETS_VER="16.22.1"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="NCBI's datasets and dataformat"
+LABEL software.version="${DATASETS_VER}"
+LABEL description="Downloads biological sequence data from NCBI"
+LABEL website="https://github.com/ncbi/datasets"
+LABEL documentation="https://www.ncbi.nlm.nih.gov/datasets/docs/v1/"
+LABEL license="https://github.com/ncbi/datasets/blob/master/pkgs/ncbi-datasets-cli/LICENSE.md"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+# unzip isn't needed for datasets/dataformat, but it is often used after downloading files with datasets
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ unzip && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /usr/local/bin
+
+# install ncbi datasets tool (pre-compiled binary)
+RUN wget https://github.com/ncbi/datasets/releases/download/v${DATASETS_VER}/linux-amd64.cli.package.zip && \
+ unzip linux-amd64.cli.package.zip && \
+ rm linux-amd64.cli.package.zip && \
+ chmod +x dataformat datasets
+
+ENV LC_ALL=C
+
+WORKDIR /data
+
+# datasets is generally datasets --help, but just typing in 'datasets' should bring up a help menu
+CMD [ "datasets", "--help" ]
+
+FROM app as test
+
+RUN dataformat --help && datasets --help
+
+# stolen from Curtis at https://github.com/StaPH-B/docker-builds/blob/master/pangolin/4.1.2/Dockerfile
+RUN datasets download virus genome accession ON924087.1 --filename ON924087.1.zip && \
+ unzip ON924087.1.zip && \
+ rm ON924087.1.zip && \
+ cp ncbi_dataset/data/genomic.fna ON924087.1.fna && \
+ wc -c ON924087.1.fna
diff --git a/ncbi-datasets/16.22.1/README.md b/ncbi-datasets/16.22.1/README.md
new file mode 100644
index 000000000..6474b9282
--- /dev/null
+++ b/ncbi-datasets/16.22.1/README.md
@@ -0,0 +1,20 @@
+# NCBI datasets and dataformat container
+
+Main tool : [datasets](https://www.ncbi.nlm.nih.gov/datasets/docs/v1/download-and-install/#use-the-datasets-tool-to-download-biological-data) and [dataformat](https://www.ncbi.nlm.nih.gov/datasets/docs/v1/download-and-install/#use-the-dataformat-tool-to-convert-data-reports-to-other-formats)
+
+Full documentation: [https://www.ncbi.nlm.nih.gov/datasets/docs/v1/how-tos/](https://www.ncbi.nlm.nih.gov/datasets/docs/v1/how-tos/)
+
+> Use NCBI Datasets to gather metadata, download data packages, view reports and more
+
+## Example Usage
+
+```bash
+# will download the fasta for ON924087.1 in a zipped directory
+datasets download virus genome accession ON924087.1 --filename ON924087.1.zip
+
+# unzipping the directory and the fasta file will be located at ncbi_dataset/data/genomic.fna
+unzip ON924087.1.zip
+
+# copying the file into something with a better name
+cp ncbi_dataset/data/genomic.fna ncbi_dataset/data/ON924087.1.genomic.fna
+```
diff --git a/ncbi-table2asn/1.28.1021/Dockerfile b/ncbi-table2asn/1.28.1021/Dockerfile
new file mode 100644
index 000000000..0ae1521bf
--- /dev/null
+++ b/ncbi-table2asn/1.28.1021/Dockerfile
@@ -0,0 +1,98 @@
+ARG RELEASE_DATE="2024-06-18"
+
+FROM ubuntu:jammy as app
+
+# version doesn't appear anywhere on the FTP site; it can only be determined via the command line
+# the binaries from the 2024-06-18 FTP release report this version
+ARG TABLE2ASN_VER="1.28.1021"
+ARG RELEASE_DATE
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="NCBI's table2asn"
+LABEL software.version="${TABLE2ASN_VER}"
+LABEL description="Converts files of various formats to ASN.1"
+LABEL website="https://www.ncbi.nlm.nih.gov/genbank/table2asn/"
+LABEL license="Public Domain"
+LABEL maintainer="Sage Wright"
+LABEL maintainer.email="sage.wright@theiagen.com"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ gzip \
+ libidn12 && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/* && \
+ ln -s /usr/lib/x86_64-linux-gnu/libidn.so.12 /usr/lib/x86_64-linux-gnu/libidn.so.11
+
+WORKDIR /usr/local/bin
+
+ENV STAPHB_TOOLS="agp_validate \
+ asn2all \
+ asn2asn \
+ asn2fasta \
+ asn2flat \
+ asn2fsa \
+ asn2gb \
+ asn2idx \
+ asn2xml \
+ asndisc \
+ asnmacro \
+ asnval \
+ asnvalidate \
+ fastaedit_public \
+ gap_stats \
+ gene2xml \
+ insdseqget \
+ sqn2agp \
+ srcchk \
+ table2asn "
+
+RUN for tool in ${STAPHB_TOOLS} ; do \
+ echo "Downloading ${tool}" && \
+ wget -q https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/${RELEASE_DATE}/by_program/${tool}/linux64.${tool}.gz && \
+ gunzip linux64.${tool}.gz && \
+ mv linux64.${tool} ${tool} && \
+ chmod +x ${tool} ; done
+
+ENV LC_ALL=C
+
+CMD table2asn -help
+
+WORKDIR /data
+
+FROM app as test
+
+ARG RELEASE_DATE
+
+# NOTE: Not all of these tools support the '-help' or '-version' flags
+# This ensures that all tools are in $PATH and executable
+RUN for tool in ${STAPHB_TOOLS} ; do echo "Checking ${tool}" && ${tool} -help && ${tool} -version ; done
+
+WORKDIR /test
+
+ARG FILES="GFF3Guidance.docx \
+ README \
+ SubmissionTemplate.sbt \
+ VERSIONS \
+ asn2gb_readme.txt \
+ euk_NoLocusTagsORmRNAs.gff \
+ euk_NoLocusTagsORmRNAs.sqn \
+ euk_withLocusTag_and_mRNAs.gff \
+ euk_withLocusTag_and_mRNAs.sqn \
+ euk_withLocusTags_butNOmRNAs.gff \
+ euk_withLocusTags_butNOmRNAs.sqn \
+ fastaedit_public_readme.txt \
+ gene2xml_readme.txt \
+ prok_NoLocusTags.gff \
+ prok_NoLocusTags.sqn \
+ prok_withLocusTags.gff \
+ prok_withLocusTags.sqn \
+ short.fsa \
+ table2asn_readme.txt \
+ tbl2asn_readme.txt"
+
+RUN for file in ${FILES}; do wget -q https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/${RELEASE_DATE}/documentation/${file}; done && \
+ fastaedit_public -in short.fsa -trim_ambig_bases -out_seq_file trimmed.fasta -out trimmed.xml && \
+ table2asn -i short.fsa && \
+ table2asn -t SubmissionTemplate.sbt -i short.fsa -o helicase.sqn
diff --git a/ncbi-table2asn/1.28.1021/README.md b/ncbi-table2asn/1.28.1021/README.md
new file mode 100644
index 000000000..5cea961b2
--- /dev/null
+++ b/ncbi-table2asn/1.28.1021/README.md
@@ -0,0 +1,46 @@
+# NCBI table2asn
+
+Main tool : [table2asn](https://www.ncbi.nlm.nih.gov/genbank/table2asn/)
+
+Full documentation: [README](https://ftp.ncbi.nlm.nih.gov/asn1-converters/by_program/table2asn/DOCUMENTATION/table2asn_readme.txt)
+
+> table2asn is a command-line program that creates sequence records for submission to GenBank
+
+There are also a collection of related tools that are included in the image
+
+Versions pulled from https://ftp.ncbi.nlm.nih.gov/asn1-converters/versions/2024-06-18/documentation/VERSIONS
+agp_validate: 2.28.1021
+asn2all 14.7
+asn2asn: 1.28.1021
+asn2fasta: 1.0.3
+asn2flat: 6.28.1021
+asn2fsa 6.1
+asn2gb 18.7
+asn2idx 1.1
+asn2xml 1.0
+asndisc 2.3
+asnmacro 1.8
+asnval 15.7
+asnvalidate: 3.28.1021
+gap_stats: 4.2.1021 (Build for external use)
+gene2xml 1.6
+insdseqget 1.1
+sqn2agp 1.9
+srcchk: 0.0.1021
+table2asn: 1.28.1021
+
+## Example Usage
+
+```bash
+# Single non-genome submission: a particular .fsa file, and only 1 sequence in the .fsa file and the source information is in the definition line of the .fsa file:
+table2asn -t template.sbt -i x.fsa -V v
+
+# Batch non-genome submission: a directory that contains .fsa files, and multiple sequences per file, and the source information is in the definition line of the .fsa files:
+table2asn -t template.sbt -indir path_to_files -a s -V v
+
+# Genome submission: a directory that contains multiple .fsa files of a single genome, and one or more sequences per file and the source information is in the definition line of the .fsa files:
+table2asn -t template.sbt -indir path_to_files -M n -Z
+
+# Genome submission for the most common gapped situation (= runs of 10 or more Ns represent a gap, and there are no gaps of completely unknown size, and the evidence for linkage across the gaps is "paired-ends"), and the source information is in the definition line of the .fsa files:
+table2asn -t template -indir path_to_files -M n -Z -gaps-min 10 -l paired-ends
+```
diff --git a/pango_aliasor/0.3.0/Dockerfile b/pango_aliasor/0.3.0/Dockerfile
new file mode 100644
index 000000000..eb5ac5e57
--- /dev/null
+++ b/pango_aliasor/0.3.0/Dockerfile
@@ -0,0 +1,61 @@
+FROM ubuntu:jammy as app
+
+ARG PANGO_ALIASOR_VER="0.3.0"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="Pango Aliasor"
+LABEL software.version="${PANGO_ALIASOR_VER}"
+LABEL description="Links sublineages to parent pangolin lineages"
+LABEL website="https://github.com/corneliusroemer/pango_aliasor"
+LABEL license="https://github.com/corneliusroemer/pango_aliasor/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ python3 \
+ python3-pip \
+ python-is-python3 \
+ wget \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN wget -q https://github.com/corneliusroemer/pango_aliasor/archive/refs/tags/v${PANGO_ALIASOR_VER}.tar.gz && \
+ pip install v${PANGO_ALIASOR_VER}.tar.gz && \
+ rm v${PANGO_ALIASOR_VER}.tar.gz && \
+ pip install --no-cache pandas && \
+ mkdir /data
+
+ENV PATH="$PATH" LC_ALL=C
+
+COPY aliasor.py /usr/bin/.
+
+WORKDIR /key
+
+RUN wget -q https://raw.githubusercontent.com/cov-lineages/pango-designation/master/pango_designation/alias_key.json
+
+WORKDIR /data
+
+CMD [ "aliasor.py", "--help" ]
+
+FROM staphb/pangolin:4.3.1-pdata-1.28 as pangolin
+
+RUN apt-get update && apt-get install -y --no-install-recommends zstd
+
+RUN wget -q https://github.com/corneliusroemer/pango-sequences/raw/main/data/pango-consensus-sequences_genome-nuc.fasta.zst && \
+ zstd -d pango-consensus-sequences_genome-nuc.fasta.zst && \
+ pangolin pango-consensus-sequences_genome-nuc.fasta
+
+FROM app as test
+
+WORKDIR /test
+
+RUN aliasor.py --help
+
+COPY --from=pangolin /data/lineage_report.csv .
+
+RUN aliasor.py --input lineage_report.csv --output aliased_lineage_report_github.tsv && \
+ aliasor.py --input lineage_report.csv --output aliased_lineage_report.tsv --alias-key /key/alias_key.json && \
+ wc -l aliased_lineage_report_github.tsv aliased_lineage_report.tsv && \
+ head aliased_lineage_report_github.tsv aliased_lineage_report.tsv
+
diff --git a/pango_aliasor/0.3.0/README.md b/pango_aliasor/0.3.0/README.md
new file mode 100644
index 000000000..ad264d465
--- /dev/null
+++ b/pango_aliasor/0.3.0/README.md
@@ -0,0 +1,78 @@
+
+# pango_aliasor container
+
+Main tool: [pango_aliasor](https://github.com/corneliusroemer/pango_aliasor)
+
+Code repository: https://github.com/corneliusroemer/pango_aliasor
+
+Basic information on how to use this tool:
+- executable: NA
+- help: NA
+- version: NA
+- description: pango_aliasor is a python library for determining parent pangolin lineages
+
+Additional information:
+- Although not an official use by any means, `aliasor.py` is included in this image. This python script was written by [@erinyoung](https://github.com/erinyoung) for some quick use cases of finding parent lineages from pangolin results. Usage is below.
+- An alias key is found at `/key/alias_key.json` in the containers spun from this image. When used, pango_aliasor does not download the latest key from github, which is useful for some cloud infrastructures.
+
+Full documentation: [https://github.com/corneliusroemer/pango_aliasor](https://github.com/corneliusroemer/pango_aliasor)
+
+## Example Usage
+
+```python
+import pandas as pd
+from pango_aliasor.aliasor import Aliasor
+import argparse
+
+
+def add_unaliased_column(tsv_file_path, pango_column='pango_lineage', unaliased_column='pango_lineage_unaliased'):
+ aliasor = Aliasor()
+ def uncompress_lineage(lineage):
+ if not lineage or pd.isna(lineage):
+ return "?"
+ return aliasor.uncompress(lineage)
+
+ df = pd.read_csv(tsv_file_path, sep='\t')
+ df[unaliased_column] = df[pango_column].apply(uncompress_lineage)
+ return df
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description='Add unaliased Pango lineage column to a TSV file.')
+ parser.add_argument('--input-tsv', required=True, help='Path to the input TSV file.')
+ parser.add_argument('--pango-column', default='pango_lineage', help='Name of the Pango lineage column in the input file.')
+ parser.add_argument('--unaliased-column', default='pango_lineage_unaliased', help='Name of the column to use for the unaliased Pango lineage column in output.')
+ args = parser.parse_args()
+ df = add_unaliased_column(args.input_tsv, args.pango_column, args.unaliased_column)
+ print(df.to_csv(sep='\t', index=False))
+```
+
+## Example Usage of aliasor.py
+
+The help message
+```bash
+usage: aliasor.py [-h] --input INPUT [--output OUTPUT] [--pango-column PANGO_COLUMN] [--unaliased-column UNALIASED_COLUMN] [--alias-key ALIAS_KEY]
+
+Add unaliased Pango lineage column to a TSV file.
+
+options:
+ -h, --help show this help message and exit
+ --input INPUT Path to the input file (should end in tsv or csv for best results).
+ --output OUTPUT Name of tab-delimited output file
+ --pango-column PANGO_COLUMN
+ Name of the Pango lineage column in the input file.
+ --unaliased-column UNALIASED_COLUMN
+ Name of the column to use for the unaliased Pango lineage column in output.
+ --alias-key ALIAS_KEY
+ Alias Key as json file. If none provided, will download the latest version from github.
+```
+
+Examples for using aliasor.py with the lineage_report.csv file generated via pangolin (lineage_report.csv)
+```bash
+# downloading the latest alias key from github
+aliasor.py --input lineage_report.csv --output unaliased_lineage_report.tsv
+
+# using included alias key
+aliasor.py --input lineage_report.csv --output unaliased_lineage_report.tsv --alias-key /key/alias_key.json
+```
+The unaliased column will be the last column in the output file.
\ No newline at end of file
diff --git a/pango_aliasor/0.3.0/aliasor.py b/pango_aliasor/0.3.0/aliasor.py
new file mode 100755
index 000000000..f34dcc061
--- /dev/null
+++ b/pango_aliasor/0.3.0/aliasor.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+
+#####
+# Mostly stolen from https://github.com/corneliusroemer/pango_aliasor?tab=readme-ov-file#convenience-script
+# and https://github.com/UPHL-BioNGS/Wastewater-genomic-analysis/blob/pooja-dev/utils/freyja_custom_lin_processing.py
+#####
+
+import pandas as pd
+from pango_aliasor.aliasor import Aliasor
+import argparse
+
+def add_unaliased_column(tsv_file_path, pango_column='lineage', unaliased_column='unaliased_lineage', alias_key = ''):
+ if alias_key:
+ aliasor = Aliasor(alias_key)
+ else:
+ aliasor = Aliasor()
+
+ def uncompress_lineage(lineage):
+ if not lineage or pd.isna(lineage):
+ return "?"
+ return aliasor.uncompress(lineage)
+
+ df = pd.DataFrame()
+
+ if tsv_file_path.endswith('.tsv'):
+ df = pd.read_csv(tsv_file_path, sep='\t')
+ elif tsv_file_path.endswith('.csv'):
+ df = pd.read_csv(tsv_file_path, sep=',')
+ else:
+ df = pd.read_csv(tsv_file_path, sep='\t')
+
+ df[unaliased_column] = df[pango_column].apply(uncompress_lineage)
+ return df
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description='Add unaliased Pango lineage column to a TSV file.')
+ parser.add_argument('--input', required=True, help='Path to the input file (should end in tsv or csv for best results).')
+ parser.add_argument('--output', default='unaliased_lineage_report.tsv', help='Name of tab-delimited output file' )
+ parser.add_argument('--pango-column', default='lineage', help='Name of the Pango lineage column in the input file.')
+ parser.add_argument('--unaliased-column', default='unaliased_lineage', help='Name of the column to use for the unaliased Pango lineage column in output.')
+ parser.add_argument('--alias-key', default='', help="Alias Key as json file. If none provided, will download the latest version from github.")
+ args = parser.parse_args()
+
+ df = add_unaliased_column(args.input, args.pango_column, args.unaliased_column, args.alias_key)
+ df.to_csv(args.output, sep='\t', index=False)
\ No newline at end of file
diff --git a/pangolin/4.3.1-pdata-1.27/README.md b/pangolin/4.3.1-pdata-1.27/README.md
index 2ea93b560..1faa2cac8 100644
--- a/pangolin/4.3.1-pdata-1.27/README.md
+++ b/pangolin/4.3.1-pdata-1.27/README.md
@@ -25,7 +25,7 @@ As of pangolin version 4.3, pangoLEARN mode has been deprecated. [More info can
> If `--analysis-mode fast` or `--analysis-mode pangolearn` is given, pangolin v4.3 will print out a warning and use UShER mode instead, unless `--datadir` is also given specifying a directory with pangoLEARN model files. The next release of pangolin-data (v1.20) will no longer include the model files which have not been updated since v1.18.
-This docker image contains `pangolin-data` v1.26. The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the the underlying UShER tree/protobuf file will be maintained for the forseeable future.
+This docker image contains `pangolin-data` v1.27. The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the underlying UShER tree/protobuf file will be maintained for the foreseeable future.
**Please use the UShER mode of pangolin if you want to stay up-to-date with the most recent lineages.** [See pangolin-data release notes here for more details](https://github.com/cov-lineages/pangolin-data/releases)
diff --git a/pangolin/4.3.1-pdata-1.28.1/Dockerfile b/pangolin/4.3.1-pdata-1.28.1/Dockerfile
new file mode 100644
index 000000000..5afcc502f
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.28.1/Dockerfile
@@ -0,0 +1,199 @@
+FROM mambaorg/micromamba:1.5.8 AS app
+
+# build and run as root users since micromamba image has 'mambauser' set as the $USER
+USER root
+# set workdir to default for building; set to /data at the end
+WORKDIR /
+
+# ARG variables only persist during build time
+# had to include the v for some of these due to GitHub tags.
+# using pangolin-data github tag, NOT what is in the GH release title "v1.2.133"
+ARG PANGOLIN_VER="v4.3.1"
+ARG PANGOLIN_DATA_VER="v1.28.1"
+ARG SCORPIO_VER="v0.3.19"
+ARG CONSTELLATIONS_VER="v0.1.12"
+ARG USHER_VER="0.6.3"
+
+# metadata labels
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="pangolin"
+LABEL software.version=${PANGOLIN_VER}
+LABEL description="Conda environment for Pangolin. Pangolin: Software package for assigning SARS-CoV-2 genome sequences to global lineages."
+LABEL website="https://github.com/cov-lineages/pangolin"
+LABEL license="GNU General Public License v3.0"
+LABEL license.url="https://github.com/cov-lineages/pangolin/blob/master/LICENSE.txt"
+LABEL maintainer="Curtis Kapsak"
+LABEL maintainer.email="kapsakcj@gmail.com"
+
+# install dependencies; cleanup apt garbage
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ git \
+ procps \
+ bsdmainutils && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# get the pangolin repo
+RUN wget "https://github.com/cov-lineages/pangolin/archive/${PANGOLIN_VER}.tar.gz" && \
+ tar -xf ${PANGOLIN_VER}.tar.gz && \
+ rm -v ${PANGOLIN_VER}.tar.gz && \
+ mv -v pangolin-* pangolin
+
+# set the environment; PATH is unnecessary here, but leaving anyways. It's reset later in dockerfile
+ENV PATH="$PATH" \
+ LC_ALL=C.UTF-8
+
+# modify environment.yml to pin specific versions during install
+# pin specific versions of usher, scorpio, pangolin-data, constellations, and pulp
+# create the conda environment using modified environment.yml
+RUN sed -i "s|usher.*|usher=${USHER_VER}|" /pangolin/environment.yml && \
+ sed -i "s|scorpio.git|scorpio.git@${SCORPIO_VER}|" /pangolin/environment.yml && \
+ sed -i "s|pangolin-data.git|pangolin-data.git@${PANGOLIN_DATA_VER}|" /pangolin/environment.yml && \
+ sed -i "s|constellations.git|constellations.git@${CONSTELLATIONS_VER}|" /pangolin/environment.yml && \
+ sed -i "12 a\ - pulp=2.7.0" /pangolin/environment.yml && \
+ micromamba create -n pangolin -y -f /pangolin/environment.yml && \
+ micromamba clean -a -y -f
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+WORKDIR /pangolin
+
+# run pip install step; download optional pre-computed assignment hashes for UShER (useful for running on large batches of samples)
+# best to skip using the assignment-cache if running on one sample for speed
+# print versions
+RUN pip install . && \
+ pangolin --add-assignment-cache && \
+ mkdir /data && \
+ pangolin --all-versions && \
+ usher --version
+
+# final working directory in "app" layer is /data for passing data in/out of container
+WORKDIR /data
+
+# hardcode pangolin executable into the PATH variable
+ENV PATH="${PATH}:/opt/conda/envs/pangolin/bin/" XDG_CACHE_HOME=/tmp
+
+# default command is to pull up help options for pangolin; can be overridden of course
+CMD ["pangolin", "-h"]
+
+# new base for testing
+FROM app AS test
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+# test on test sequences supplied with Pangolin code
+RUN pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher && \
+ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+
+# test functionality of assignment-cache option
+RUN pangolin --use-assignment-cache /pangolin/pangolin/test/test_seqs.fasta
+
+# download B.1.1.7 genome from Utah
+ADD https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.consensus.fa /test-data/SRR13957123.consensus.fa
+
+# test on a B.1.1.7 genome
+RUN pangolin /test-data/SRR13957123.consensus.fa -o /test-data/SRR13957123-pusher && \
+ column -t -s, /test-data/SRR13957123-pusher/lineage_report.csv
+
+ # install unzip for unzipping zip archive from NCBI
+RUN apt-get update && apt-get install -y --no-install-recommends unzip
+
+# install ncbi datasets tool (pre-compiled binary); place in $PATH
+RUN wget https://ftp.ncbi.nlm.nih.gov/pub/datasets/command-line/LATEST/linux-amd64/datasets && \
+ chmod +x datasets && \
+ mv -v datasets /usr/local/bin
+
+# download assembly for a BA.1 from Florida (https://www.ncbi.nlm.nih.gov/biosample?term=SAMN29506515 and https://www.ncbi.nlm.nih.gov/nuccore/ON924087)
+# run pangolin in usher analysis mode
+RUN datasets download virus genome accession ON924087.1 --filename ON924087.1.zip && \
+ unzip ON924087.1.zip && rm ON924087.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna ON924087.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin ON924087.1.genomic.fna -o ON924087.1-usher && \
+ column -t -s, ON924087.1-usher/lineage_report.csv
+
+# test specific for new lineage, XBB.1.16, introduced in pangolin-data v1.19
+# using this assembly: https://www.ncbi.nlm.nih.gov/nuccore/2440446687
+# biosample here: https://www.ncbi.nlm.nih.gov/biosample?term=SAMN33060589
+# one of the samples included in initial pango-designation here: https://github.com/cov-lineages/pango-designation/issues/1723
+RUN datasets download virus genome accession OQ381818.1 --filename OQ381818.1.zip && \
+ unzip OQ381818.1.zip && rm OQ381818.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna OQ381818.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin OQ381818.1.genomic.fna -o OQ381818.1-usher && \
+ column -t -s, OQ381818.1-usher/lineage_report.csv
+
+# testing another XBB.1.16, trying to test scorpio functionality. Want pangolin to NOT assign lineage based on pango hash match.
+# this test runs as expected, uses scorpio to check for constellation of mutations, then assign using PUSHER placement
+RUN datasets download virus genome accession OR177999.1 --filename OR177999.1.zip && \
+unzip OR177999.1.zip && rm OR177999.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR177999.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR177999.1.genomic.fna -o OR177999.1-usher && \
+column -t -s, OR177999.1-usher/lineage_report.csv
+
+ ## test for BA.2.86
+ # virus identified in MI: https://www.ncbi.nlm.nih.gov/nuccore/OR461132.1
+RUN datasets download virus genome accession OR461132.1 --filename OR461132.1.zip && \
+unzip OR461132.1.zip && rm OR461132.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR461132.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR461132.1.genomic.fna -o OR461132.1-usher && \
+column -t -s, OR461132.1-usher/lineage_report.csv
+
+ ## test for JN.2 (BA.2.86 sublineage) JN.2 is an alias of B.1.1.529.2.86.1.2
+ # NY CDC Quest sample: https://www.ncbi.nlm.nih.gov/nuccore/OR598183
+RUN datasets download virus genome accession OR598183.1 --filename OR598183.1.zip && \
+unzip OR598183.1.zip && rm OR598183.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR598183.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR598183.1.genomic.fna -o OR598183.1-usher && \
+column -t -s, OR598183.1-usher/lineage_report.csv
+
+## test for JQ.1 (BA.2.86.3 sublineage); JQ.1 is an alias of B.1.1.529.2.86.3.1
+# THANK YOU ERIN AND UPHL!! https://www.ncbi.nlm.nih.gov/nuccore/OR716684
+# this test is important due to the fact that this lineage was included in the UShER tree, despite being designated after the pangolin-designation 1.23 release
+# it previously caused an error/bug in pangolin, but is now fixed
+RUN datasets download virus genome accession OR716684.1 --filename OR716684.1.zip && \
+unzip OR716684.1.zip && rm OR716684.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR716684.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR716684.1.genomic.fna -o OR716684.1-usher && \
+column -t -s, OR716684.1-usher/lineage_report.csv
+
+## test for JN.1.22 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.22)
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/a90c8e31c154621ed86c985debfea09e17541cda
+# Here's the genome on NCBI, which was used to designate JN.1.22 lineage
+RUN datasets download virus genome accession PP189069.1 --filename PP189069.1.zip && \
+unzip PP189069.1.zip && rm PP189069.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP189069.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP189069.1.genomic.fna -o PP189069.1-usher && \
+column -t -s, PP189069.1-usher/lineage_report.csv
+
+## test for JN.1.48 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.48)
+# this lineage was designated in pango-designation v1.27: https://github.com/cov-lineages/pango-designation/releases/tag/v1.27
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/67f48bf24283999f1940f3aee8159f404124ff3f
+# Here's the genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PP218754
+RUN datasets download virus genome accession PP218754.1 --filename PP218754.1.zip && \
+unzip PP218754.1.zip && rm PP218754.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP218754.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP218754.1.genomic.fna -o PP218754.1-usher && \
+column -t -s, PP218754.1-usher/lineage_report.csv
+
+# new lineage LK.1 that was introduced in pango-designation v1.28: https://github.com/cov-lineages/pango-designation/commit/922795c90de355e67200cf4d379e8e5ff22472e4
+# thank you Luis, Lorraine, Marcos & team from PR Sci Trust for sharing your data!
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/2728145425
+RUN datasets download virus genome accession PP770375.1 --filename PP770375.1.zip && \
+unzip PP770375.1.zip && rm PP770375.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP770375.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP770375.1.genomic.fna -o PP770375.1-usher && \
+column -t -s, PP770375.1-usher/lineage_report.csv
diff --git a/pangolin/4.3.1-pdata-1.28.1/README.md b/pangolin/4.3.1-pdata-1.28.1/README.md
new file mode 100644
index 000000000..8652fc19f
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.28.1/README.md
@@ -0,0 +1,53 @@
+# pangolin docker image
+
+Main tool : [pangolin](https://github.com/cov-lineages/pangolin)
+
+Full documentation: [https://cov-lineages.org/resources/pangolin.html](https://cov-lineages.org/resources/pangolin.html)
+
+Phylogenetic Assignment of Named Global Outbreak LINeages
+
+Additional tools:
+
+- [pangolin-data](https://github.com/cov-lineages/pangolin-data) 1.28.1
+- [pangolin-assignment](https://github.com/cov-lineages/pangolin-assignment) 1.28.1
+- [minimap2](https://github.com/lh3/minimap2) 2.28-r1209
+- [usher](https://github.com/yatisht/usher) 0.6.3
+- [faToVcf](https://github.com/yatisht/usher) 448
+- [scorpio](https://github.com/cov-lineages/scorpio) 0.3.19
+- [constellations](https://github.com/cov-lineages/constellations) 0.1.12
+- [gofasta](https://github.com/virus-evolution/gofasta) 1.2.1
+- [mafft](https://mafft.cbrc.jp/alignment/software/) 7.526
+- python 3.8.19
+
+## pangoLEARN deprecation
+
+As of pangolin version 4.3, pangoLEARN mode has been deprecated. [More info can be found here on the v4.3 release page.](https://github.com/cov-lineages/pangolin/releases/tag/v4.3)
+
+> If `--analysis-mode fast` or `--analysis-mode pangolearn` is given, pangolin v4.3 will print out a warning and use UShER mode instead, unless `--datadir` is also given specifying a directory with pangoLEARN model files. The next release of pangolin-data (v1.20) will no longer include the model files which have not been updated since v1.18.
+
+This docker image contains `pangolin-data` v1.28.1. The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the underlying UShER tree/protobuf file will be maintained for the foreseeable future.
+
+**Please use the UShER mode of pangolin if you want to stay up-to-date with the most recent lineages.** [See pangolin-data release notes here for more details](https://github.com/cov-lineages/pangolin-data/releases)
+
+## Example Usage
+
+```bash
+# run Pangolin in the default mode (usher). Can optionally supply --analysis-mode usher
+$ pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher
+
+# view the output CSV
+$ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+taxon lineage conflict ambiguity_score scorpio_call scorpio_support scorpio_conflict scorpio_notes version pangolin_version scorpio_version constellation_version is_designated qc_status qc_notes note
+India seq B.1.617.1 0.0 B.1.617.1-like 1.0 0.0 scorpio call: Alt alleles 11; Ref alleles 0; Amb alleles 0; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.617.1(1/1)
+b117 B.1.1.7 0.0 Alpha (B.1.1.7-like) 0.91 0.04 scorpio call: Alt alleles 21; Ref alleles 1; Amb alleles 1; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.1.7(2/2)
+outgroup_A A 0.0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: A(1/1)
+issue_57_torsten_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_6000_Ns_in_18000_bases Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_no_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_is_too_short Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.9
+This_seq_has_lots_of_Ns Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.98
+This_seq_is_literally_just_N Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+Japan_seq B 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+USA_seq B.1.314 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+Unassigned_omicron_seq BA.1 0.0 Probable Omicron (BA.1-like) 0.71 0.08 scorpio call: Alt alleles 42; Ref alleles 5; Amb alleles 9; Oth alleles 3 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.03 Usher placements: BA.1(1/1)
+```
diff --git a/pangolin/4.3.1-pdata-1.28/README.md b/pangolin/4.3.1-pdata-1.28/README.md
index 7196fe191..4de0d8653 100644
--- a/pangolin/4.3.1-pdata-1.28/README.md
+++ b/pangolin/4.3.1-pdata-1.28/README.md
@@ -25,7 +25,7 @@ As of pangolin version 4.3, pangoLEARN mode has been deprecated. [More info can
> If `--analysis-mode fast` or `--analysis-mode pangolearn` is given, pangolin v4.3 will print out a warning and use UShER mode instead, unless `--datadir` is also given specifying a directory with pangoLEARN model files. The next release of pangolin-data (v1.20) will no longer include the model files which have not been updated since v1.18.
-This docker image contains `pangolin-data` v1.27. The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the the underlying UShER tree/protobuf file will be maintained for the forseeable future.
+This docker image contains `pangolin-data` v1.28. The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the underlying UShER tree/protobuf file will be maintained for the foreseeable future.
**Please use the UShER mode of pangolin if you want to stay up-to-date with the most recent lineages.** [See pangolin-data release notes here for more details](https://github.com/cov-lineages/pangolin-data/releases)
diff --git a/pangolin/4.3.1-pdata-1.29/Dockerfile b/pangolin/4.3.1-pdata-1.29/Dockerfile
new file mode 100644
index 000000000..b79f2a6c5
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.29/Dockerfile
@@ -0,0 +1,208 @@
+FROM mambaorg/micromamba:1.5.8 AS app
+
+# build and run as root users since micromamba image has 'mambauser' set as the $USER
+USER root
+# set workdir to default for building; set to /data at the end
+WORKDIR /
+
+# ARG variables only persist during build time
+# had to include the v for some of these due to GitHub tags.
+# using pangolin-data github tag, NOT what is in the GH release title "v1.2.133"
+ARG PANGOLIN_VER="v4.3.1"
+ARG PANGOLIN_DATA_VER="v1.29"
+ARG SCORPIO_VER="v0.3.19"
+ARG CONSTELLATIONS_VER="v0.1.12"
+ARG USHER_VER="0.6.3"
+
+# metadata labels
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="pangolin"
+LABEL software.version=${PANGOLIN_VER}
+LABEL description="Conda environment for Pangolin. Pangolin: Software package for assigning SARS-CoV-2 genome sequences to global lineages."
+LABEL website="https://github.com/cov-lineages/pangolin"
+LABEL license="GNU General Public License v3.0"
+LABEL license.url="https://github.com/cov-lineages/pangolin/blob/master/LICENSE.txt"
+LABEL maintainer="Curtis Kapsak"
+LABEL maintainer.email="kapsakcj@gmail.com"
+
+# install dependencies; cleanup apt garbage
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ git \
+ procps \
+ bsdmainutils && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# get the pangolin repo
+RUN wget "https://github.com/cov-lineages/pangolin/archive/${PANGOLIN_VER}.tar.gz" && \
+ tar -xf ${PANGOLIN_VER}.tar.gz && \
+ rm -v ${PANGOLIN_VER}.tar.gz && \
+ mv -v pangolin-* pangolin
+
+# set the environment; PATH is unnecessary here, but leaving anyways. It's reset later in dockerfile
+ENV PATH="$PATH" \
+ LC_ALL=C.UTF-8
+
+# modify environment.yml to pin specific versions during install
+# pin specific versions of usher, scorpio, pangolin-data, constellations, and pulp
+# create the conda environment using modified environment.yml
+RUN sed -i "s|usher.*|usher=${USHER_VER}|" /pangolin/environment.yml && \
+ sed -i "s|scorpio.git|scorpio.git@${SCORPIO_VER}|" /pangolin/environment.yml && \
+ sed -i "s|pangolin-data.git|pangolin-data.git@${PANGOLIN_DATA_VER}|" /pangolin/environment.yml && \
+ sed -i "s|constellations.git|constellations.git@${CONSTELLATIONS_VER}|" /pangolin/environment.yml && \
+ sed -i "12 a\ - pulp=2.7.0" /pangolin/environment.yml && \
+ micromamba create -n pangolin -y -f /pangolin/environment.yml && \
+ micromamba clean -a -y -f
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+WORKDIR /pangolin
+
+# run pip install step; download optional pre-computed assignment hashes for UShER (useful for running on large batches of samples)
+# best to skip using the assignment-cache if running on one sample for speed
+# print versions
+RUN pip install . && \
+ pangolin --add-assignment-cache && \
+ mkdir /data && \
+ pangolin --all-versions && \
+ usher --version
+
+# final working directory in "app" layer is /data for passing data in/out of container
+WORKDIR /data
+
+# hardcode pangolin executable into the PATH variable
+ENV PATH="${PATH}:/opt/conda/envs/pangolin/bin/" XDG_CACHE_HOME=/tmp
+
+# default command is to pull up help options for pangolin; can be overridden of course
+CMD ["pangolin", "-h"]
+
+# new base for testing
+FROM app AS test
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+# test on test sequences supplied with Pangolin code
+RUN pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher && \
+ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+
+# test functionality of assignment-cache option
+RUN pangolin --use-assignment-cache /pangolin/pangolin/test/test_seqs.fasta
+
+# download B.1.1.7 genome from Utah
+ADD https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.consensus.fa /test-data/SRR13957123.consensus.fa
+
+# test on a B.1.1.7 genome
+RUN pangolin /test-data/SRR13957123.consensus.fa -o /test-data/SRR13957123-pusher && \
+ column -t -s, /test-data/SRR13957123-pusher/lineage_report.csv
+
+ # install unzip for unzipping zip archive from NCBI
+RUN apt-get update && apt-get install -y --no-install-recommends unzip
+
+# install ncbi datasets tool (pre-compiled binary); place in $PATH
+RUN wget https://ftp.ncbi.nlm.nih.gov/pub/datasets/command-line/LATEST/linux-amd64/datasets && \
+ chmod +x datasets && \
+ mv -v datasets /usr/local/bin
+
+# download assembly for a BA.1 from Florida (https://www.ncbi.nlm.nih.gov/biosample?term=SAMN29506515 and https://www.ncbi.nlm.nih.gov/nuccore/ON924087)
+# run pangolin in usher analysis mode
+RUN datasets download virus genome accession ON924087.1 --filename ON924087.1.zip && \
+ unzip ON924087.1.zip && rm ON924087.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna ON924087.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin ON924087.1.genomic.fna -o ON924087.1-usher && \
+ column -t -s, ON924087.1-usher/lineage_report.csv
+
+# test specific for new lineage, XBB.1.16, introduced in pangolin-data v1.19
+# using this assembly: https://www.ncbi.nlm.nih.gov/nuccore/2440446687
+# biosample here: https://www.ncbi.nlm.nih.gov/biosample?term=SAMN33060589
+# one of the samples included in initial pango-designation here: https://github.com/cov-lineages/pango-designation/issues/1723
+RUN datasets download virus genome accession OQ381818.1 --filename OQ381818.1.zip && \
+ unzip OQ381818.1.zip && rm OQ381818.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna OQ381818.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin OQ381818.1.genomic.fna -o OQ381818.1-usher && \
+ column -t -s, OQ381818.1-usher/lineage_report.csv
+
+# testing another XBB.1.16, trying to test scorpio functionality. Want pangolin to NOT assign lineage based on pango hash match.
+# this test runs as expected, uses scorpio to check for constellation of mutations, then assign using PUSHER placement
+RUN datasets download virus genome accession OR177999.1 --filename OR177999.1.zip && \
+unzip OR177999.1.zip && rm OR177999.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR177999.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR177999.1.genomic.fna -o OR177999.1-usher && \
+column -t -s, OR177999.1-usher/lineage_report.csv
+
+ ## test for BA.2.86
+ # virus identified in MI: https://www.ncbi.nlm.nih.gov/nuccore/OR461132.1
+RUN datasets download virus genome accession OR461132.1 --filename OR461132.1.zip && \
+unzip OR461132.1.zip && rm OR461132.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR461132.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR461132.1.genomic.fna -o OR461132.1-usher && \
+column -t -s, OR461132.1-usher/lineage_report.csv
+
+ ## test for JN.2 (BA.2.86 sublineage) JN.2 is an alias of B.1.1.529.2.86.1.2
+ # NY CDC Quest sample: https://www.ncbi.nlm.nih.gov/nuccore/OR598183
+RUN datasets download virus genome accession OR598183.1 --filename OR598183.1.zip && \
+unzip OR598183.1.zip && rm OR598183.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR598183.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR598183.1.genomic.fna -o OR598183.1-usher && \
+column -t -s, OR598183.1-usher/lineage_report.csv
+
+## test for JQ.1 (BA.2.86.3 sublineage); JQ.1 is an alias of B.1.1.529.2.86.3.1
+# THANK YOU ERIN AND UPHL!! https://www.ncbi.nlm.nih.gov/nuccore/OR716684
+# this test is important due to the fact that this lineage was included in the UShER tree, despite being designated after the pangolin-designation 1.23 release
+# it previously caused an error/bug in pangolin, but is now fixed
+RUN datasets download virus genome accession OR716684.1 --filename OR716684.1.zip && \
+unzip OR716684.1.zip && rm OR716684.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR716684.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR716684.1.genomic.fna -o OR716684.1-usher && \
+column -t -s, OR716684.1-usher/lineage_report.csv
+
+## test for JN.1.22 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.22)
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/a90c8e31c154621ed86c985debfea09e17541cda
+# Here's the genome on NCBI, which was used to designate JN.1.22 lineage
+RUN datasets download virus genome accession PP189069.1 --filename PP189069.1.zip && \
+unzip PP189069.1.zip && rm PP189069.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP189069.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP189069.1.genomic.fna -o PP189069.1-usher && \
+column -t -s, PP189069.1-usher/lineage_report.csv
+
+## test for JN.1.48 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.48)
+# this lineage was designated in pango-designation v1.27: https://github.com/cov-lineages/pango-designation/releases/tag/v1.27
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/67f48bf24283999f1940f3aee8159f404124ff3f
+# Here's the genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PP218754
+RUN datasets download virus genome accession PP218754.1 --filename PP218754.1.zip && \
+unzip PP218754.1.zip && rm PP218754.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP218754.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP218754.1.genomic.fna -o PP218754.1-usher && \
+column -t -s, PP218754.1-usher/lineage_report.csv
+
+# new lineage LK.1 that was introduced in pango-designation v1.28: https://github.com/cov-lineages/pango-designation/commit/922795c90de355e67200cf4d379e8e5ff22472e4
+# thank you Luis, Lorraine, Marcos & team from PR Sci Trust for sharing your data!
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/2728145425
+RUN datasets download virus genome accession PP770375.1 --filename PP770375.1.zip && \
+unzip PP770375.1.zip && rm PP770375.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP770375.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP770375.1.genomic.fna -o PP770375.1-usher && \
+column -t -s, PP770375.1-usher/lineage_report.csv
+
+# new lineage KP.3.3.2 that was introduced in pango-designation v1.29: https://github.com/cov-lineages/pango-designation/commit/7125e606818312b78f0756d7fcab6dba92dd0a9e
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PQ073669
+RUN datasets download virus genome accession PQ073669.1 --filename PQ073669.1.zip && \
+unzip PQ073669.1.zip && rm PQ073669.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PQ073669.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PQ073669.1.genomic.fna -o PQ073669.1-usher && \
+column -t -s, PQ073669.1-usher/lineage_report.csv
\ No newline at end of file
diff --git a/pangolin/4.3.1-pdata-1.29/README.md b/pangolin/4.3.1-pdata-1.29/README.md
new file mode 100644
index 000000000..8542245a0
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.29/README.md
@@ -0,0 +1,53 @@
+# pangolin docker image
+
+Main tool : [pangolin](https://github.com/cov-lineages/pangolin)
+
+Full documentation: [https://cov-lineages.org/resources/pangolin.html](https://cov-lineages.org/resources/pangolin.html)
+
+Phylogenetic Assignment of Named Global Outbreak LINeages
+
+Additional tools:
+
+- [pangolin-data](https://github.com/cov-lineages/pangolin-data) 1.29
+- [pangolin-assignment](https://github.com/cov-lineages/pangolin-assignment) 1.29
+- [minimap2](https://github.com/lh3/minimap2) 2.28-r1209
+- [usher](https://github.com/yatisht/usher) 0.6.3
+- [faToVcf](https://github.com/yatisht/usher) 448
+- [scorpio](https://github.com/cov-lineages/scorpio) 0.3.19
+- [constellations](https://github.com/cov-lineages/constellations) 0.1.12
+- [gofasta](https://github.com/virus-evolution/gofasta) 1.2.1
+- [mafft](https://mafft.cbrc.jp/alignment/software/) 7.526
+- python 3.8.19
+
+## pangoLEARN deprecation
+
+As of pangolin version 4.3, pangoLEARN mode has been deprecated. [More info can be found here on the v4.3 release page.](https://github.com/cov-lineages/pangolin/releases/tag/v4.3)
+
+> If `--analysis-mode fast` or `--analysis-mode pangolearn` is given, pangolin v4.3 will print out a warning and use UShER mode instead, unless `--datadir` is also given specifying a directory with pangoLEARN model files. The next release of pangolin-data (v1.20) will no longer include the model files which have not been updated since v1.18.
+
+The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the underlying UShER tree/protobuf file will be maintained for the foreseeable future.
+
+**Please use the UShER mode of pangolin if you want to stay up-to-date with the most recent lineages.** [See pangolin-data release notes here for more details](https://github.com/cov-lineages/pangolin-data/releases)
+
+## Example Usage
+
+```bash
+# run Pangolin in the default mode (usher). Can optionally supply --analysis-mode usher
+$ pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher
+
+# view the output CSV
+$ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+taxon lineage conflict ambiguity_score scorpio_call scorpio_support scorpio_conflict scorpio_notes version pangolin_version scorpio_version constellation_version is_designated qc_status qc_notes note
+India seq B.1.617.1 0.0 B.1.617.1-like 1.0 0.0 scorpio call: Alt alleles 11; Ref alleles 0; Amb alleles 0; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.617.1(1/1)
+b117 B.1.1.7 0.0 Alpha (B.1.1.7-like) 0.91 0.04 scorpio call: Alt alleles 21; Ref alleles 1; Amb alleles 1; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.1.7(2/2)
+outgroup_A A 0.0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: A(1/1)
+issue_57_torsten_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_6000_Ns_in_18000_bases Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_no_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_is_too_short Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.9
+This_seq_has_lots_of_Ns Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.98
+This_seq_is_literally_just_N Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+Japan_seq B 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+USA_seq B.1.314 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+Unassigned_omicron_seq BA.1 0.0 Probable Omicron (BA.1-like) 0.71 0.08 scorpio call: Alt alleles 42; Ref alleles 5; Amb alleles 9; Oth alleles 3 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.03 Usher placements: BA.1(1/1)
+```
diff --git a/pangolin/4.3.1-pdata-1.30/Dockerfile b/pangolin/4.3.1-pdata-1.30/Dockerfile
new file mode 100644
index 000000000..41078dd00
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.30/Dockerfile
@@ -0,0 +1,217 @@
+FROM mambaorg/micromamba:1.5.10 AS app
+
+# build and run as root users since micromamba image has 'mambauser' set as the $USER
+USER root
+# set workdir to default for building; set to /data at the end
+WORKDIR /
+
+# ARG variables only persist during build time
+# had to include the v for some of these due to GitHub tags.
+# using pangolin-data github tag, NOT what is in the GH release title "v1.2.133"
+ARG PANGOLIN_VER="v4.3.1"
+ARG PANGOLIN_DATA_VER="v1.30"
+ARG SCORPIO_VER="v0.3.19"
+ARG CONSTELLATIONS_VER="v0.1.12"
+ARG USHER_VER="0.6.3"
+
+# metadata labels
+LABEL base.image="mambaorg/micromamba:1.5.10"
+LABEL dockerfile.version="1"
+LABEL software="pangolin"
+LABEL software.version=${PANGOLIN_VER}
+LABEL description="Conda environment for Pangolin. Pangolin: Software package for assigning SARS-CoV-2 genome sequences to global lineages."
+LABEL website="https://github.com/cov-lineages/pangolin"
+LABEL license="GNU General Public License v3.0"
+LABEL license.url="https://github.com/cov-lineages/pangolin/blob/master/LICENSE.txt"
+LABEL maintainer="Curtis Kapsak"
+LABEL maintainer.email="kapsakcj@gmail.com"
+
+# install dependencies; cleanup apt garbage
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ git \
+ procps \
+ bsdmainutils && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# get the pangolin repo
+RUN wget "https://github.com/cov-lineages/pangolin/archive/${PANGOLIN_VER}.tar.gz" && \
+ tar -xf ${PANGOLIN_VER}.tar.gz && \
+ rm -v ${PANGOLIN_VER}.tar.gz && \
+ mv -v pangolin-* pangolin
+
+# set the environment; PATH is unnecessary here, but leaving anyways. It's reset later in dockerfile
+ENV PATH="$PATH" \
+ LC_ALL=C.UTF-8
+
+# modify environment.yml to pin specific versions during install
+# pin specific versions of usher, scorpio, pangolin-data, constellations, and pulp
+# create the conda environment using modified environment.yml
+RUN sed -i "s|usher.*|usher=${USHER_VER}|" /pangolin/environment.yml && \
+ sed -i "s|scorpio.git|scorpio.git@${SCORPIO_VER}|" /pangolin/environment.yml && \
+ sed -i "s|pangolin-data.git|pangolin-data.git@${PANGOLIN_DATA_VER}|" /pangolin/environment.yml && \
+ sed -i "s|constellations.git|constellations.git@${CONSTELLATIONS_VER}|" /pangolin/environment.yml && \
+ sed -i "12 a\ - pulp=2.7.0" /pangolin/environment.yml && \
+ micromamba create -n pangolin -y -f /pangolin/environment.yml && \
+ micromamba clean -a -y -f
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+WORKDIR /pangolin
+
+# run pip install step; download optional pre-computed assignment hashes for UShER (useful for running on large batches of samples)
+# best to skip using the assignment-cache if running on one sample for speed
+# print versions
+RUN pip install . && \
+ pangolin --add-assignment-cache && \
+ mkdir /data && \
+ pangolin --all-versions && \
+ usher --version
+
+# final working directory in "app" layer is /data for passing data in/out of container
+WORKDIR /data
+
+# hardcode pangolin executable into the PATH variable
+ENV PATH="${PATH}:/opt/conda/envs/pangolin/bin/" XDG_CACHE_HOME=/tmp
+
+# default command is to pull up help options for pangolin; can be overridden of course
+CMD ["pangolin", "-h"]
+
+# new base for testing
+FROM app AS test
+
+# so that mamba/conda env is active when running below commands
+ENV ENV_NAME="pangolin"
+ARG MAMBA_DOCKERFILE_ACTIVATE=1
+
+# test on test sequences supplied with Pangolin code
+RUN pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher && \
+ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+
+# test functionality of assignment-cache option
+RUN pangolin --use-assignment-cache /pangolin/pangolin/test/test_seqs.fasta
+
+# download B.1.1.7 genome from Utah
+ADD https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.consensus.fa /test-data/SRR13957123.consensus.fa
+
+# test on a B.1.1.7 genome
+RUN pangolin /test-data/SRR13957123.consensus.fa -o /test-data/SRR13957123-pusher && \
+ column -t -s, /test-data/SRR13957123-pusher/lineage_report.csv
+
+ # install unzip for unzipping zip archive from NCBI
+RUN apt-get update && apt-get install -y --no-install-recommends unzip
+
+# install ncbi datasets tool (pre-compiled binary); place in $PATH
+RUN wget https://ftp.ncbi.nlm.nih.gov/pub/datasets/command-line/LATEST/linux-amd64/datasets && \
+ chmod +x datasets && \
+ mv -v datasets /usr/local/bin
+
+# download assembly for a BA.1 from Florida (https://www.ncbi.nlm.nih.gov/biosample?term=SAMN29506515 and https://www.ncbi.nlm.nih.gov/nuccore/ON924087)
+# run pangolin in usher analysis mode
+RUN datasets download virus genome accession ON924087.1 --filename ON924087.1.zip && \
+ unzip ON924087.1.zip && rm ON924087.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna ON924087.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin ON924087.1.genomic.fna -o ON924087.1-usher && \
+ column -t -s, ON924087.1-usher/lineage_report.csv
+
+# test specific for new lineage, XBB.1.16, introduced in pangolin-data v1.19
+# using this assembly: https://www.ncbi.nlm.nih.gov/nuccore/2440446687
+# biosample here: https://www.ncbi.nlm.nih.gov/biosample?term=SAMN33060589
+# one of the samples included in initial pango-designation here: https://github.com/cov-lineages/pango-designation/issues/1723
+RUN datasets download virus genome accession OQ381818.1 --filename OQ381818.1.zip && \
+ unzip -o OQ381818.1.zip && rm OQ381818.1.zip && \
+ mv -v ncbi_dataset/data/genomic.fna OQ381818.1.genomic.fna && \
+ rm -vr ncbi_dataset/ README.md && \
+ pangolin OQ381818.1.genomic.fna -o OQ381818.1-usher && \
+ column -t -s, OQ381818.1-usher/lineage_report.csv
+
+# testing another XBB.1.16, trying to test scorpio functionality. Want pangolin to NOT assign lineage based on pango hash match.
+# this test runs as expected, uses scorpio to check for constellation of mutations, then assign using PUSHER placement
+RUN datasets download virus genome accession OR177999.1 --filename OR177999.1.zip && \
+unzip -o OR177999.1.zip && rm OR177999.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR177999.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR177999.1.genomic.fna -o OR177999.1-usher && \
+column -t -s, OR177999.1-usher/lineage_report.csv
+
+ ## test for BA.2.86
+ # virus identified in MI: https://www.ncbi.nlm.nih.gov/nuccore/OR461132.1
+RUN datasets download virus genome accession OR461132.1 --filename OR461132.1.zip && \
+unzip -o OR461132.1.zip && rm OR461132.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR461132.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR461132.1.genomic.fna -o OR461132.1-usher && \
+column -t -s, OR461132.1-usher/lineage_report.csv
+
+ ## test for JN.2 (BA.2.86 sublineage) JN.2 is an alias of B.1.1.529.2.86.1.2
+ # NY CDC Quest sample: https://www.ncbi.nlm.nih.gov/nuccore/OR598183
+RUN datasets download virus genome accession OR598183.1 --filename OR598183.1.zip && \
+unzip -o OR598183.1.zip && rm OR598183.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR598183.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR598183.1.genomic.fna -o OR598183.1-usher && \
+column -t -s, OR598183.1-usher/lineage_report.csv
+
+## test for JQ.1 (BA.2.86.3 sublineage); JQ.1 is an alias of B.1.1.529.2.86.3.1
+# THANK YOU ERIN AND UPHL!! https://www.ncbi.nlm.nih.gov/nuccore/OR716684
+# this test is important due to the fact that this lineage was included in the UShER tree, despite being designated after the pangolin-designation 1.23 release
+# it previously caused an error/bug in pangolin, but it is now fixed
+RUN datasets download virus genome accession OR716684.1 --filename OR716684.1.zip && \
+unzip -o OR716684.1.zip && rm OR716684.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna OR716684.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin OR716684.1.genomic.fna -o OR716684.1-usher && \
+column -t -s, OR716684.1-usher/lineage_report.csv
+
+## test for JN.1.22 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.22)
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/a90c8e31c154621ed86c985debfea09e17541cda
+# Here's the genome on NCBI, which was used to designate JN.1.22 lineage
+RUN datasets download virus genome accession PP189069.1 --filename PP189069.1.zip && \
+unzip -o PP189069.1.zip && rm PP189069.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP189069.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP189069.1.genomic.fna -o PP189069.1-usher && \
+column -t -s, PP189069.1-usher/lineage_report.csv
+
+## test for JN.1.48 (BA.2.86.x sublineage; full unaliased lineage is B.1.1.529.2.86.1.1.48)
+# this lineage was designated in pango-designation v1.27: https://github.com/cov-lineages/pango-designation/releases/tag/v1.27
+# see here for commit where it was designated https://github.com/cov-lineages/pango-designation/commit/67f48bf24283999f1940f3aee8159f404124ff3f
+# Here's the genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PP218754
+RUN datasets download virus genome accession PP218754.1 --filename PP218754.1.zip && \
+unzip -o PP218754.1.zip && rm PP218754.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP218754.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP218754.1.genomic.fna -o PP218754.1-usher && \
+column -t -s, PP218754.1-usher/lineage_report.csv
+
+# new lineage LK.1 that was introduced in pango-designation v1.28: https://github.com/cov-lineages/pango-designation/commit/922795c90de355e67200cf4d379e8e5ff22472e4
+# thank you Luis, Lorraine, Marcos & team from PR Sci Trust for sharing your data!
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/2728145425
+RUN datasets download virus genome accession PP770375.1 --filename PP770375.1.zip && \
+unzip -o PP770375.1.zip && rm PP770375.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PP770375.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PP770375.1.genomic.fna -o PP770375.1-usher && \
+column -t -s, PP770375.1-usher/lineage_report.csv
+
+# new lineage KP.3.3.2 that was introduced in pango-designation v1.29: https://github.com/cov-lineages/pango-designation/commit/7125e606818312b78f0756d7fcab6dba92dd0a9e
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PQ073669
+RUN datasets download virus genome accession PQ073669.1 --filename PQ073669.1.zip && \
+unzip -o PQ073669.1.zip && rm PQ073669.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PQ073669.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PQ073669.1.genomic.fna -o PQ073669.1-usher && \
+column -t -s, PQ073669.1-usher/lineage_report.csv
+
+# new lineage MC.2 that was introduced in pango-designation v1.30: https://github.com/cov-lineages/pango-designation/commit/c64dbc47fbfbfd7f4da011deeb1a88dd6baa45f1#diff-a121ea4b8cbeb4c0020511b5535bf24489f0223cc83511df7b8209953115d329R2564181
+# genome on NCBI: https://www.ncbi.nlm.nih.gov/nuccore/PQ034842.1
+RUN datasets download virus genome accession PQ034842.1 --filename PQ034842.1.zip && \
+unzip -o PQ034842.1.zip && rm PQ034842.1.zip && \
+mv -v ncbi_dataset/data/genomic.fna PQ034842.1.genomic.fna && \
+rm -vr ncbi_dataset/ README.md && \
+pangolin PQ034842.1.genomic.fna -o PQ034842.1-usher && \
+column -t -s, PQ034842.1-usher/lineage_report.csv
diff --git a/pangolin/4.3.1-pdata-1.30/README.md b/pangolin/4.3.1-pdata-1.30/README.md
new file mode 100644
index 000000000..16c0da511
--- /dev/null
+++ b/pangolin/4.3.1-pdata-1.30/README.md
@@ -0,0 +1,53 @@
+# pangolin docker image
+
+Main tool : [pangolin](https://github.com/cov-lineages/pangolin)
+
+Full documentation: [https://cov-lineages.org/resources/pangolin.html](https://cov-lineages.org/resources/pangolin.html)
+
+Phylogenetic Assignment of Named Global Outbreak LINeages
+
+Additional tools:
+
+- [pangolin-data](https://github.com/cov-lineages/pangolin-data) 1.30
+- [pangolin-assignment](https://github.com/cov-lineages/pangolin-assignment) 1.30
+- [minimap2](https://github.com/lh3/minimap2) 2.28-r1209
+- [usher](https://github.com/yatisht/usher) 0.6.3
+- [faToVcf](https://github.com/yatisht/usher) 448
+- [scorpio](https://github.com/cov-lineages/scorpio) 0.3.19
+- [constellations](https://github.com/cov-lineages/constellations) 0.1.12
+- [gofasta](https://github.com/virus-evolution/gofasta) 1.2.1
+- [mafft](https://mafft.cbrc.jp/alignment/software/) 7.526
+- python 3.8.19
+
+## pangoLEARN deprecation
+
+As of pangolin version 4.3, pangoLEARN mode has been deprecated. [More info can be found here on the v4.3 release page.](https://github.com/cov-lineages/pangolin/releases/tag/v4.3)
+
+> If `--analysis-mode fast` or `--analysis-mode pangolearn` is given, pangolin v4.3 will print out a warning and use UShER mode instead, unless `--datadir` is also given specifying a directory with pangoLEARN model files. The next release of pangolin-data (v1.20) will no longer include the model files which have not been updated since v1.18.
+
+The pangoLEARN model has not been updated since pangolin-data version 1.18. Only the underlying UShER tree/protobuf file will be maintained for the foreseeable future.
+
+**Please use the UShER mode of pangolin if you want to stay up-to-date with the most recent lineages.** [See pangolin-data release notes here for more details](https://github.com/cov-lineages/pangolin-data/releases)
+
+## Example Usage
+
+```bash
+# run Pangolin in the default mode (usher). Can optionally supply --analysis-mode usher
+$ pangolin /pangolin/pangolin/test/test_seqs.fasta -o /data/test_seqs-output-pusher
+
+# view the output CSV
+$ column -t -s, /data/test_seqs-output-pusher/lineage_report.csv
+taxon lineage conflict ambiguity_score scorpio_call scorpio_support scorpio_conflict scorpio_notes version pangolin_version scorpio_version constellation_version is_designated qc_status qc_notes note
+India seq B.1.617.1 0.0 B.1.617.1-like 1.0 0.0 scorpio call: Alt alleles 11; Ref alleles 0; Amb alleles 0; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.617.1(1/1)
+b117 B.1.1.7 0.0 Alpha (B.1.1.7-like) 0.91 0.04 scorpio call: Alt alleles 21; Ref alleles 1; Amb alleles 1; Oth alleles 0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: B.1.1.7(2/2)
+outgroup_A A 0.0 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.02 Usher placements: A(1/1)
+issue_57_torsten_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_6000_Ns_in_18000_bases Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_has_no_seq Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+This_seq_is_too_short Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.9
+This_seq_has_lots_of_Ns Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail Ambiguous_content:0.98
+This_seq_is_literally_just_N Unassigned PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False fail failed to map
+Japan_seq B 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+USA_seq B.1.314 0.0 PANGO-v1.16 4.1.3 0.3.17 v0.1.10 True pass Ambiguous_content:0.02 Assigned from designation hash.
+Unassigned_omicron_seq BA.1 0.0 Probable Omicron (BA.1-like) 0.71 0.08 scorpio call: Alt alleles 42; Ref alleles 5; Amb alleles 9; Oth alleles 3 PUSHER-v1.16 4.1.3 0.3.17 v0.1.10 False pass Ambiguous_content:0.03 Usher placements: BA.1(1/1)
+```
diff --git a/panqc/0.4.0/Dockerfile b/panqc/0.4.0/Dockerfile
new file mode 100644
index 000000000..30f28e586
--- /dev/null
+++ b/panqc/0.4.0/Dockerfile
@@ -0,0 +1,48 @@
+FROM ubuntu:jammy as app
+
+ARG PANQC_VER="0.4.0"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="panqc"
+LABEL software.version="${PANQC_VER}"
+LABEL description="A pan-genome quality control toolkit for evaluating nucleotide redundancy in pan-genome analyses."
+LABEL website="https://github.com/maxgmarin/panqc"
+LABEL license="https://github.com/maxgmarin/panqc/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ procps \
+ python3 \
+ python3-pip \
+ python3-dev \
+ gcc && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN pip install cython
+
+RUN wget -q https://github.com/maxgmarin/panqc/archive/refs/tags/${PANQC_VER}.tar.gz && \
+ pip install --no-cache-dir ${PANQC_VER}.tar.gz && \
+ rm ${PANQC_VER}.tar.gz && \
+ mkdir /data
+
+ENV LC_ALL=C
+
+CMD panqc nrc --help && panqc utils --help
+
+WORKDIR /data
+
+FROM app as test
+
+WORKDIR /test
+
+RUN panqc nrc --help && \
+ panqc utils --help
+
+RUN wget -q https://github.com/maxgmarin/panqc/archive/refs/tags/${PANQC_VER}.tar.gz && \
+ tar -xvf ${PANQC_VER}.tar.gz && \
+ cd panqc-${PANQC_VER}/tests/data && \
+ panqc nrc -a TestSet1.InputAsmPaths.tsv -r TestSet1.pan_genome_reference.fa.gz -m TestSet1.gene_presence_absence.csv -o test_results/
diff --git a/panqc/0.4.0/README.md b/panqc/0.4.0/README.md
new file mode 100644
index 000000000..d399d8394
--- /dev/null
+++ b/panqc/0.4.0/README.md
@@ -0,0 +1,22 @@
+# panqc container
+
+Main tool: [panqc](https://github.com/maxgmarin/panqc)
+
+Code repository: https://github.com/maxgmarin/panqc
+
+Basic information on how to use this tool:
+- executable: panqc nrc || panqc utils
+- help: --help
+- version: NA
+- description: |
+
+> The panqc Nucleotide Redundancy Correction (NRC) pipeline adjusts for redundancy at the DNA level within pan-genome estimates in two steps. In step one, all genes predicted to be absent at the Amino Acid (AA) level are compared to their corresponding assembly at the nucleotide level. In cases where the nucleotide sequence is found with high coverage and sequence identity (Query Coverage & Sequence Identity > 90%), the gene is marked as “present at the DNA level”. Next, all genes are clustered and merged using a k-mer based metric of nucleotide similarity. Cases where two or more genes are divergent at the AA level but highly similar at the nucleotide level will be merged into a single “nucleotide similarity gene cluster”. After applying this method the pan-genome gene presence matrix is readjusted according to these results.
+
+
+Full documentation: [https://github.com/maxgmarin/panqc](https://github.com/maxgmarin/panqc)
+
+## Example Usage
+
+```bash
+panqc nrc --asms InputAsmPaths.tsv --pg-ref pan_genome_reference.fa --is-rtab gene_presence_absence.Rtab --results_dir results/
+```
diff --git a/pasty/2.2.1/Dockerfile b/pasty/2.2.1/Dockerfile
new file mode 100644
index 000000000..054fbd5c5
--- /dev/null
+++ b/pasty/2.2.1/Dockerfile
@@ -0,0 +1,50 @@
+ARG PASTY_VER="2.2.1"
+
+FROM mambaorg/micromamba:1.5.8 AS app
+
+ARG PASTY_VER
+
+LABEL base.image="mambaorg/micromamba:1.5.8"
+LABEL dockerfile.version="1"
+LABEL software="pasty"
+LABEL software.version="${PASTY_VER}"
+LABEL description="In silico serogrouping of Pseudomonas aeruginosa isolates from genome assemblies"
+LABEL website="https://github.com/rpetit3/pasty"
+LABEL license="https://github.com/rpetit3/pasty/blob/main/LICENSE"
+LABEL maintainer="Curtis Kapsak"
+LABEL maintainer.email="curtis.kapsak@theiagen.com"
+LABEL maintainer2="Kutluhan Incekara"
+LABEL maintainer2.email="kutluhan.incekara@ct.gov"
+
+USER root
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN micromamba install --name base -c conda-forge -c bioconda pasty=${PASTY_VER} &&\
+ micromamba clean -afy
+
+ENV PATH="/opt/conda/bin/:$PATH" \
+ LC_ALL=C
+
+CMD ["pasty", "--help"]
+
+WORKDIR /data
+
+## Test ##
+FROM app AS test
+
+ARG PASTY_VER
+
+RUN apt-get update && apt-get install -y wget
+
+# adapted from https://github.com/rpetit3/pasty/blob/main/test/README.md
+RUN wget -q https://github.com/rpetit3/pasty/archive/refs/tags/v${PASTY_VER}.tar.gz &&\
+ tar -xvf v${PASTY_VER}.tar.gz &&\
+ cd pasty-${PASTY_VER}/test/ &&\
+ for i in $(ls data | grep "fna.gz"); do pasty --input ./data/$i --prefix $(basename $i .fna.gz) --outdir results/ --force; done &&\
+ head -n 1 results/O1-GCF_001420225.tsv > staphb-test.tsv &&\
+ ls results/ | grep -v "details" | grep -v "blastn" | xargs -I {} grep -v "schema_version" results/{} | sort -k1 >> staphb-test.tsv &&\
+ cat staphb-test.tsv
+
diff --git a/pasty/2.2.1/README.md b/pasty/2.2.1/README.md
new file mode 100644
index 000000000..7e7eb7e16
--- /dev/null
+++ b/pasty/2.2.1/README.md
@@ -0,0 +1,42 @@
+# pasty container
+
+Main tool : [pasty](https://github.com/rpetit3/pasty)
+
+Additional tools:
+
+- camlhmp 1.0.0
+- blast 2.16.0+
+
+Full documentation: [https://github.com/rpetit3/pasty](https://github.com/rpetit3/pasty)
+
+A tool easily taken advantage of for in silico serogrouping of Pseudomonas aeruginosa isolates from genome assemblies
+
+## Example Usage
+
+```bash
+pasty --input ./data/O1-GCF_001420225.fna.gz --prefix O1-GCF_001420225 --outdir results/
+
+Running camlhmp with following parameters:
+ --input ./data/O1-GCF_001420225.fna.gz
+ --yaml /opt/conda/bin/../share/pasty/pa-osa.yaml
+ --targets /opt/conda/bin/../share/pasty/pa-osa.fasta
+ --outdir results/
+ --prefix O1-GCF_001420225
+ --min-pident 95
+ --min-coverage 95
+
+Starting camlhmp for P. aeruginosa serogrouping...
+Running blastn...
+Processing hits...
+Final Results...
+ P. aeruginosa serogrouping
+┏━━━━━━━━━━━━━━━━━━┳━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┓
+┃ sample ┃ type ┃ targets ┃ coverages ┃ hits ┃ schema ┃ schema_version ┃ camlhmp_version ┃ params ┃ comment ┃
+┡━━━━━━━━━━━━━━━━━━╇━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━┩
+│ O1-GCF_001420225 │ O1 │ O1 │ 100.00 │ 1 │ pasty │ 2.1.0 │ 1.0.0 │ min-coverage=95;min-pident=95 │ │
+└──────────────────┴──────┴─────────┴───────────┴──────┴────────┴────────────────┴─────────────────┴───────────────────────────────┴─────────┘
+Writing outputs...
+Final predicted type written to results/O1-GCF_001420225.tsv
+Results against each type written to results/O1-GCF_001420225.details.tsv
+blastn results written to results/O1-GCF_001420225.blastn.tsv
+```
diff --git a/pycirclize/1.6.0/Dockerfile b/pycirclize/1.6.0/Dockerfile
new file mode 100644
index 000000000..020f84643
--- /dev/null
+++ b/pycirclize/1.6.0/Dockerfile
@@ -0,0 +1,43 @@
+FROM python:3.9.17-slim as app
+
+ARG PYCIRCLIZE_VER="1.6.0"
+
+# 'LABEL' instructions tag the image with metadata that might be important to the user
+LABEL base.image="python:3.9.17-slim"
+LABEL dockerfile.version="1"
+LABEL software="pyCirclize"
+LABEL software.version="${PYCIRCLIZE_VER}"
+LABEL description="Circular visualization in Python"
+LABEL website="https://github.com/moshi4/pyCirclize"
+LABEL license="https://github.com/moshi4/pyCirclize/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN pip install --no-cache pycirclize==${PYCIRCLIZE_VER}
+
+ENV PATH=$PATH \
+ LC_ALL=C
+
+CMD pip show pycirclize
+
+WORKDIR /data
+
+FROM app as test
+
+WORKDIR /test
+
+RUN pip show pycirclize
+
+RUN apt-get update && apt-get install -y --no-install-recommends wget
+
+RUN wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/pycirclize/1.5.0/tests/example1.py && \
+ wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/pycirclize/1.5.0/tests/example2.py && \
+ wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/pycirclize/1.5.0/tests/example3.py
+
+RUN python example1.py && ls example01.png && \
+ python example2.py && ls example02.png && \
+ python example3.py && ls example03.png
diff --git a/pycirclize/1.6.0/README.md b/pycirclize/1.6.0/README.md
new file mode 100644
index 000000000..39f05ac66
--- /dev/null
+++ b/pycirclize/1.6.0/README.md
@@ -0,0 +1,61 @@
+# pyCirclize container
+
+Main tool: [pyCirclize](https://pypi.org/project/pyCirclize/)
+
+Code repository: https://github.com/moshi4/pyCirclize
+
+Basic information on how to use this tool:
+- executable: NA
+- help: NA
+- version: NA
+- description: pyCirclize is a python package for creating visual images of circular genomes (like those of bacteria)
+
+Full documentation: https://pypi.org/project/pyCirclize/
+
+## Example Usage
+
+This is for running containers with a specific python package, and is not really meant to be run from the command line. Instead, a bioinformatician could create a python script that uses pycirclize.
+
+example1.py:
+```python
+from pycirclize import Circos
+import numpy as np
+np.random.seed(0)
+
+# Initialize Circos sectors
+sectors = {"A": 10, "B": 15, "C": 12, "D": 20, "E": 15}
+circos = Circos(sectors, space=5)
+
+for sector in circos.sectors:
+ # Plot sector name
+ sector.text(f"Sector: {sector.name}", r=110, size=15)
+ # Create x positions & random y values
+ x = np.arange(sector.start, sector.end) + 0.5
+ y = np.random.randint(0, 100, len(x))
+ # Plot lines
+ track1 = sector.add_track((80, 100), r_pad_ratio=0.1)
+ track1.xticks_by_interval(interval=1)
+ track1.axis()
+ track1.line(x, y)
+ # Plot points
+ track2 = sector.add_track((55, 75), r_pad_ratio=0.1)
+ track2.axis()
+ track2.scatter(x, y)
+ # Plot bars
+ track3 = sector.add_track((30, 50), r_pad_ratio=0.1)
+ track3.axis()
+ track3.bar(x, y)
+
+# Plot links
+circos.link(("A", 0, 3), ("B", 15, 12))
+circos.link(("B", 0, 3), ("C", 7, 11), color="skyblue")
+circos.link(("C", 2, 5), ("E", 15, 12), color="chocolate", direction=1)
+circos.link(("D", 3, 5), ("D", 18, 15), color="lime", ec="black", lw=0.5, hatch="//", direction=2)
+circos.link(("D", 8, 10), ("E", 2, 8), color="violet", ec="red", lw=1.0, ls="dashed")
+
+circos.savefig("example01.png")
+```
+
+```bash
+python example1.py
+```
diff --git a/pygenomeviz/1.1.0/Dockerfile b/pygenomeviz/1.1.0/Dockerfile
new file mode 100644
index 000000000..a7d1ec8c2
--- /dev/null
+++ b/pygenomeviz/1.1.0/Dockerfile
@@ -0,0 +1,67 @@
+FROM python:3.9.17-slim as app
+
+ARG PYGENOMEVIZ_VER="1.1.0"
+
+LABEL base.image="python:3.9.17-slim"
+LABEL dockerfile.version="1"
+LABEL software="pyGenomeViz"
+LABEL software.version=$PYGENOMEVIZ_VER
+LABEL description="genome visualization python package for comparative genomics"
+LABEL website="https://moshi4.github.io/pyGenomeViz"
+LABEL license="MIT License"
+LABEL license.url="https://github.com/moshi4/pyGenomeViz/blob/main/LICENSE"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+#mmseqs2=14-7e284+ds-1+b2
+#mummer=3.23+dfsg-8
+#progressivemauve=1.2.0+4713+dfsg-5+b1
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ ca-certificates \
+ procps \
+ ncbi-blast+ \
+ mmseqs2 \
+ mummer \
+ progressivemauve && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN pip install --no-cache-dir pygenomeviz==$PYGENOMEVIZ_VER
+
+ENV LC_ALL=C.UTF-8
+
+CMD pgv-mmseqs --help && pgv-mummer --help && pgv-pmauve --help && pgv-blast --help
+
+WORKDIR /data
+
+FROM app as test
+
+WORKDIR /test
+
+RUN pgv-mmseqs --help && pgv-mummer --help && pgv-pmauve --help && pgv-blast --help
+
+RUN \
+ # Download example dataset
+ pgv-download yersinia_phage && \
+ # Run BLAST CLI workflow
+ pgv-blast NC_070914.gbk NC_070915.gbk NC_070916.gbk NC_070918.gbk \
+ -o pgv-blast_example --seqtype protein --show_scale_bar --curve \
+ --feature_linewidth 0.3 --length_thr 100 --identity_thr 30 && \
+ # Download example dataset
+ pgv-download mycoplasma_mycoides && \
+ # Run MUMmer CLI workflow
+ pgv-mummer GCF_000023685.1.gbff GCF_000800785.1.gbff GCF_000959055.1.gbff GCF_000959065.1.gbff \
+ -o pgv-mummer_example --show_scale_bar --curve \
+ --feature_type2color CDS:blue rRNA:lime tRNA:magenta && \
+ # Download example dataset
+ pgv-download enterobacteria_phage && \
+ # Run MMseqs CLI workflow
+ pgv-mmseqs NC_013600.gbk NC_016566.gbk NC_019724.gbk NC_024783.gbk NC_028901.gbk NC_031081.gbk \
+ -o pgv-mmseqs_example --show_scale_bar --curve --feature_linewidth 0.3 \
+ --feature_type2color CDS:skyblue --normal_link_color chocolate --inverted_link_color limegreen && \
+ # Download example dataset
+ pgv-download escherichia_coli && \
+ # Run progressiveMauve CLI workflow
+ pgv-pmauve NC_000913.gbk.gz NC_002695.gbk.gz NC_011751.gbk.gz NC_011750.gbk.gz \
+ -o pgv-pmauve_example --show_scale_bar && \
+ # Check final files
+ ls pgv-blast_example/result.png pgv-mummer_example/result.png pgv-mmseqs_example/result.png pgv-pmauve_example/result.png
diff --git a/pygenomeviz/1.1.0/README.md b/pygenomeviz/1.1.0/README.md
new file mode 100644
index 000000000..e0dd7807c
--- /dev/null
+++ b/pygenomeviz/1.1.0/README.md
@@ -0,0 +1,69 @@
+# pyGenomeViz container
+
+Main tool : [pyGenomeViz](https://moshi4.github.io/pyGenomeViz/)
+
+Additional tools:
+- ncbi-blast+ (2.12.0+ds-3+b1)
+- mmseqs2 (14-7e284+ds-1+b2)
+- mummer (3.23+dfsg-8)
+- progressivemauve (1.2.0+4713+dfsg-5+b1)
+
+Full documentation: https://moshi4.github.io/pyGenomeViz/
+
+> pyGenomeViz is a genome visualization python package for comparative genomics implemented based on matplotlib. This package is developed for the purpose of easily and beautifully plotting genomic features and sequence similarity comparison links between multiple genomes.
+
+## Example Usage
+
+Using the CLI
+
+```bash
+# Download example dataset
+pgv-download yersinia_phage
+
+# Run BLAST CLI workflow
+pgv-blast NC_070914.gbk NC_070915.gbk NC_070916.gbk NC_070918.gbk \
+ -o pgv-blast_example --seqtype protein --show_scale_bar --curve \
+ --feature_linewidth 0.3 --length_thr 100 --identity_thr 30
+
+# Download example dataset
+pgv-download mycoplasma_mycoides
+
+# Run MUMmer CLI workflow
+pgv-mummer GCF_000023685.1.gbff GCF_000800785.1.gbff GCF_000959055.1.gbff GCF_000959065.1.gbff \
+ -o pgv-mummer_example --show_scale_bar --curve \
+ --feature_type2color CDS:blue rRNA:lime tRNA:magenta
+
+# Download example dataset
+pgv-download enterobacteria_phage
+
+# Run MMseqs CLI workflow
+pgv-mmseqs NC_013600.gbk NC_016566.gbk NC_019724.gbk NC_024783.gbk NC_028901.gbk NC_031081.gbk \
+ -o pgv-mmseqs_example --show_scale_bar --curve --feature_linewidth 0.3 \
+ --feature_type2color CDS:skyblue --normal_link_color chocolate --inverted_link_color limegreen
+
+# Download example dataset
+pgv-download escherichia_coli
+
+# Run progressiveMauve CLI workflow
+pgv-pmauve NC_000913.gbk.gz NC_002695.gbk.gz NC_011751.gbk.gz NC_011750.gbk.gz \
+ -o pgv-pmauve_example --show_scale_bar
+```
+
+This container contains the pygenomeviz python package, so custom scripts can import pygenomeviz
+
+```python
+from pygenomeviz import GenomeViz
+
+gv = GenomeViz()
+gv.set_scale_xticks(ymargin=0.5)
+
+track = gv.add_feature_track("tutorial", 1000)
+track.add_sublabel()
+
+track.add_feature(50, 200, 1)
+track.add_feature(250, 460, -1, fc="blue")
+track.add_feature(500, 710, 1, fc="lime")
+track.add_feature(750, 960, 1, fc="magenta", lw=1.0)
+
+gv.savefig("features.png")
+```
diff --git a/rdp/2.14/Dockerfile b/rdp/2.14/Dockerfile
new file mode 100644
index 000000000..dc2fe2a0b
--- /dev/null
+++ b/rdp/2.14/Dockerfile
@@ -0,0 +1,59 @@
+# set global variables
+ARG RDP_VER="2.14"
+
+# build Dockerfile
+FROM ubuntu:jammy as app
+ARG RDP_VER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="RDP Classifier"
+LABEL software.version=${RDP_VER}
+LABEL description="The RDP Classifier is a naive Bayesian classifier which was developed to provide rapid taxonomic placement based on rRNA sequence data."
+LABEL website="https://github.com/rdpstaff/classifier"
+LABEL documentation="https://sourceforge.net/projects/rdp-classifier/"
+LABEL license.url="https://github.com/rdpstaff/classifier/blob/master/LICENSE"
+LABEL maintainer="Taylor K. Paisie"
+LABEL maintainer.email='ltj8@cdc.gov'
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ openjdk-11-jre \
+ wget \
+ unzip && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# Install rdp_classifier
+RUN wget -q https://sourceforge.net/projects/rdp-classifier/files/rdp-classifier/rdp_classifier_${RDP_VER}.zip &&\
+ unzip rdp_classifier_${RDP_VER}.zip &&\
+ mv /rdp_classifier_${RDP_VER} /rdp_classifier &&\
+ chmod +x /rdp_classifier/dist/classifier.jar &&\
+ echo "#!/bin/bash" >> /rdp_classifier/dist/classifier &&\
+ echo "exec java -jar /rdp_classifier/dist/classifier.jar """"$""@"""" " >> /rdp_classifier/dist/classifier &&\
+ chmod +x /rdp_classifier/dist/classifier
+
+ENV PATH="${PATH}:/rdp_classifier/dist" LC_ALL=C
+
+CMD classifier
+
+RUN mkdir data/
+WORKDIR /data
+
+# Running RDP on test controls
+FROM app as test
+
+WORKDIR /test
+
+# running help to ensure executable is in path
+RUN classifier
+
+# testing on real files
+RUN apt-get update && apt-get install -y \
+ python3 \
+ wget
+
+RUN mkdir ../tests/
+COPY tests/ ../tests/
+RUN python3 -m unittest discover -v -s ../tests
diff --git a/rdp/2.14/README.md b/rdp/2.14/README.md
new file mode 100644
index 000000000..7d6f650e9
--- /dev/null
+++ b/rdp/2.14/README.md
@@ -0,0 +1,53 @@
+# RDP Classifier
+
+Main tool: [RDP Classifier](https://sourceforge.net/projects/rdp-classifier/)
+
+Code repository: https://github.com/rdpstaff/classifier
+
+Basic information on how to use this tool:
+- executable: |
+```
+ classify - classify one or multiple samples
+ crossvalidate - cross validate accuracy testing
+ libcompare - compare two samples
+ loot - leave one (sequence or taxon) out accuracy testing
+ merge-detail - merge classification detail result files to create a taxon assignment counts file
+ merge-count - merge multiple taxon assignment count files to into one count file
+ random-sample - random select a subset or subregion of sequences
+ rm-dupseq - remove identical or any sequence contained by another sequence
+ rm-partialseq - remove partial sequences
+ taxa-sim - calculate and plot the similarities within taxa
+ train - retrain classifier
+```
+
+- help: classify # with no flags
+- version: NA
+- description: |
+> The RDP Classifier is a naive Bayesian classifier which was developed to provide rapid taxonomic placement based on rRNA sequence data.
+
+
+Full documentation: https://sourceforge.net/projects/rdp-classifier/
+
+
+## Example analysis
+Get test data:
+```
+# Download test data
+wget -nv https://raw.githubusercontent.com/taylorpaisie/docker_containers/main/rdp/2.14/16S_rRNA_gene.Burkholderia_pseudomallei.2002721184.AY305776.1.fasta -O 16S_test.fa
+wget -nv https://raw.githubusercontent.com/taylorpaisie/docker_containers/main/rdp/2.14/18S_rRNA_gene.Homo_sapiens.T2T-CHM13v2.0.Chromosome13.fasta -O 18S_test.fa
+```
+
+Use RDP Classifier to get taxonomic assignments for bacterial and archaeal 16S rRNA sequences:
+```
+classifier classify -o taxa_16S_test.txt 16S_test.fa
+classifier classify -o taxa_18S_test.txt 18S_test.fa
+```
+
+## Output
+```
+head -2 taxa_16S_test.txt
+
+AY305776.1 Root rootrank 1.0 Bacteria domain 1.0 Pseudomonadota phylum 1.0 Betaproteobacteria class 1.0 Burkholderiales order 1.0 Burkholderiaceae family 1.0 Burkholderia genus 1.0
+```
+
+
diff --git a/rdp/2.14/tests/scripts/run_controls.sh b/rdp/2.14/tests/scripts/run_controls.sh
new file mode 100644
index 000000000..e71914dc5
--- /dev/null
+++ b/rdp/2.14/tests/scripts/run_controls.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+# Download test data
+wget -nv https://raw.githubusercontent.com/taylorpaisie/docker_containers/main/rdp/2.14/16S_rRNA_gene.Burkholderia_pseudomallei.2002721184.AY305776.1.fasta -O 16S_test.fa
+wget -nv https://raw.githubusercontent.com/taylorpaisie/docker_containers/main/rdp/2.14/18S_rRNA_gene.Homo_sapiens.T2T-CHM13v2.0.Chromosome13.fasta -O 18S_test.fa
+
+# Get taxonomic assignments for your data
+classifier classify -o taxa_16S_test.txt 16S_test.fa
+classifier classify -o taxa_18S_test.txt 18S_test.fa
+
+# run checksum on files
+sha256sum 16S_test.fa > 16S_checksum.txt
+sha256sum 18S_test.fa > 18S_checksum.txt
+
+
+
+
diff --git a/rdp/2.14/tests/test_controls.py b/rdp/2.14/tests/test_controls.py
new file mode 100644
index 000000000..391319951
--- /dev/null
+++ b/rdp/2.14/tests/test_controls.py
@@ -0,0 +1,31 @@
+import unittest
+import subprocess
+from subprocess import PIPE
+
+
+class TestControls(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ command = "bash /tests/scripts/run_controls.sh"
+ subprocess.run(command, shell=True, stdout=PIPE)
+
+
+ def test_rdp16S(self):
+ with open("16S_checksum.txt") as f:
+ rdp_checksum = f.readlines()[0].split(" ")[0]
+ self.assertEqual(
+ rdp_checksum,
+ "a38342a9ba63946ffb4324c7858f5cc43b873673cb08080437f7500dda351f65",
+ )
+
+ def test_rdp18S(self):
+ with open("18S_checksum.txt") as f:
+ rdp_checksum = f.readlines()[0].split(" ")[0]
+ self.assertEqual(
+ rdp_checksum,
+ "44bf9c60750ff3b804b3e3a56969dab982307a16faee63f0928b2f54e70b02f7",
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
\ No newline at end of file
diff --git a/rdp/2.14/tests/test_versions.py b/rdp/2.14/tests/test_versions.py
new file mode 100644
index 000000000..6475123c2
--- /dev/null
+++ b/rdp/2.14/tests/test_versions.py
@@ -0,0 +1,14 @@
+import unittest
+import subprocess
+import sys
+import re
+
+
+class TestVersion(unittest.TestCase):
+ def test_python(self):
+ version = f"{sys.version_info.major}.{sys.version_info.minor}"
+ self.assertEqual(version, "3.10") # Update this with the expected Python version
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/samtools/1.20.c/Dockerfile b/samtools/1.20.c/Dockerfile
new file mode 100644
index 000000000..1bda46dfb
--- /dev/null
+++ b/samtools/1.20.c/Dockerfile
@@ -0,0 +1,100 @@
+ARG SAMTOOLS_VER="1.20"
+
+FROM ubuntu:jammy as builder
+
+ARG SAMTOOLS_VER
+
+# install dependencies required for compiling samtools
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ libncurses5-dev \
+ libbz2-dev \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ zlib1g-dev \
+ libssl-dev \
+ libdeflate-dev \
+ gcc \
+ wget \
+ make \
+ perl \
+ bzip2 \
+ gnuplot \
+ ca-certificates
+
+# download, compile, and install samtools
+RUN wget -q https://github.com/samtools/samtools/releases/download/${SAMTOOLS_VER}/samtools-${SAMTOOLS_VER}.tar.bz2 && \
+ tar -xjf samtools-${SAMTOOLS_VER}.tar.bz2 && \
+ cd samtools-${SAMTOOLS_VER} && \
+ ./configure && \
+ make && \
+ make install && \
+ make test
+
+### start of app stage ###
+FROM ubuntu:jammy as app
+
+ARG SAMTOOLS_VER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="samtools"
+LABEL software.version="${SAMTOOLS_VER}"
+LABEL description="Tools (written in C using htslib) for manipulating next-generation sequencing data"
+LABEL website="https://github.com/samtools/samtools"
+LABEL license="https://github.com/samtools/samtools/blob/develop/LICENSE"
+LABEL maintainer="Shelby Bennett"
+LABEL maintainer.email="shelby.bennett@dgs.virginia.gov"
+LABEL maintainer2="Curtis Kapsak"
+LABEL maintainer2.email="kapsakcj@gmail.com"
+LABEL maintainer3="Erin Young"
+LABEL maintainer3.email="eriny@utah.gov"
+LABEL maintainer4="Kutluhan Incekara"
+LABEL maintainer4.email="kutluhan.incekara@ct.gov"
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# install dependencies required for running samtools
+# 'gnuplot' required for plot-ampliconstats
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ perl \
+ zlib1g \
+ libncurses5 \
+ bzip2 \
+ liblzma5 \
+ libcurl3-gnutls \
+ libdeflate0 \
+ gnuplot \
+ && apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# copy in samtools executables from builder stage
+COPY --from=builder /usr/local/bin/* /usr/local/bin/
+
+ENV LC_ALL=C
+
+# final working directory is /data
+WORKDIR /data
+
+# default command is to pull up help options
+CMD ["samtools", "--help"]
+
+### start of test stage ###
+FROM app as test
+
+ARG SAMTOOLS_VER
+
+# check PATH
+RUN samtools --help
+
+# install make and wget for downloading test files
+RUN apt-get update && apt-get install --no-install-recommends -y wget ca-certificates
+
+WORKDIR /test
+
+RUN wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.consensus.fa && \
+ wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.primertrim.sorted.bam && \
+ wget -q https://raw.githubusercontent.com/artic-network/artic-ncov2019/master/primer_schemes/nCoV-2019/V3/nCoV-2019.primer.bed && \
+ samtools stats SRR13957123.primertrim.sorted.bam && \
+ samtools faidx SRR13957123.consensus.fa && \
+ samtools ampliconstats nCoV-2019.primer.bed SRR13957123.primertrim.sorted.bam > SRR13957123_ampliconstats.txt && \
+ plot-ampliconstats plot SRR13957123_ampliconstats.txt && \
+ ls
diff --git a/samtools/1.20.c/README.md b/samtools/1.20.c/README.md
new file mode 100644
index 000000000..1317ccb5d
--- /dev/null
+++ b/samtools/1.20.c/README.md
@@ -0,0 +1,30 @@
+# samtools container
+
+Main tool: [samtools](https://www.htslib.org/)
+
+Code repository: https://github.com/samtools/samtools
+
+Additional tools:
+
+* perl 5.34.0
+
+Basic information on how to use this tool:
+- executable: samtools
+- help: --help
+- version: --version
+- description: Utilities for the Sequence Alignment/Map (SAM) format
+
+Additional information:
+
+This container includes samtools v1.20 compiled with **libdeflate** for better cloud performance.
+
+Full documentation: https://www.htslib.org/doc/samtools.html
+
+## Example Usage
+
+```bash
+samtools ampliconclip -b bed.file input.bam
+
+samtools sort -T /tmp/aln.sorted -o aln.sorted.bam aln.bam
+```
+
diff --git a/seqkit/2.8.2/Dockerfile b/seqkit/2.8.2/Dockerfile
new file mode 100644
index 000000000..f3ecc6e2c
--- /dev/null
+++ b/seqkit/2.8.2/Dockerfile
@@ -0,0 +1,53 @@
+FROM ubuntu:jammy as app
+
+# ARG sets environment variables during the build stage
+ARG SEQKIT_VER="2.8.2"
+
+# LABEL instructions tag the image with metadata that might be important to the user
+# Optional, but highly recommended
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="SeqKit"
+LABEL software.version=${SEQKIT_VER}
+LABEL description="SeqKit - a cross-platform and ultrafast toolkit for FASTA/Q file manipulation"
+LABEL website="https://github.com/shenwei356/seqkit"
+LABEL license="https://github.com/shenwei356/seqkit/blob/master/LICENSE"
+LABEL maintainer="Henry Kunerth"
+LABEL maintainer.email="henrykunerth@gmail.com"
+LABEL maintainer2="Erin Young"
+LABEL maintainer2.email="eriny@utah.gov"
+
+# Install dependencies (update as needed)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ procps \
+ unzip && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# download SEQKIT and organize directories
+RUN wget -q https://github.com/shenwei356/seqkit/releases/download/v${SEQKIT_VER}/seqkit_linux_amd64.tar.gz && \
+ tar -xzf seqkit_linux_amd64.tar.gz && \
+ mv seqkit /usr/local/bin/. && \
+ rm seqkit_linux_amd64.tar.gz && \
+ mkdir /data
+
+# singularity compatibility
+ENV LC_ALL=C
+
+CMD seqkit --help
+
+# WORKDIR sets working directory
+WORKDIR /data
+
+# A second FROM instruction creates a new stage
+# We use `test` for the test image
+FROM app as test
+
+WORKDIR /test
+
+RUN seqkit --help
+
+#download test .fasta and check that SEQKIT can run to generate stats
+RUN wget -q https://raw.githubusercontent.com/StaPH-B/docker-builds/master/tests/SARS-CoV-2/SRR13957123.consensus.fa && \
+ seqkit stat *
diff --git a/seqkit/2.8.2/README.md b/seqkit/2.8.2/README.md
new file mode 100644
index 000000000..de048f057
--- /dev/null
+++ b/seqkit/2.8.2/README.md
@@ -0,0 +1,36 @@
+# SeqKit container
+
+Main tool : [SeqKit](https://github.com/shenwei356/seqkit)
+
+SeqKit is a cross-platform and ultrafast toolkit for FASTA/Q file manipulation.
+
+Citation:
+
+W Shen, S Le, Y Li*, F Hu*. SeqKit: a cross-platform and ultrafast toolkit for FASTA/Q file manipulation. PLOS ONE. doi:10.1371/journal.pone.0163962.
+
+
+- **Documents:** [http://bioinf.shenwei.me/seqkit](http://bioinf.shenwei.me/seqkit)
+([**Usage**](http://bioinf.shenwei.me/seqkit/usage/),
+[**FAQ**](http://bioinf.shenwei.me/seqkit/faq/),
+[**Tutorial**](http://bioinf.shenwei.me/seqkit/tutorial/),
+and
+[**Benchmark**](http://bioinf.shenwei.me/seqkit/benchmark/))
+
+## Example Usage
+
+```bash
+# get simple statistics from FASTA/Q files
+
+seqkit stats
+
+# or with flags
+
+seqkit stats --all --tabular
+
+# conversion from FASTA to FASTQ
+
+seqkit fa2fq
+
+
+
+```
diff --git a/shigapass/1.5.0/Dockerfile b/shigapass/1.5.0/Dockerfile
new file mode 100644
index 000000000..681ea73d9
--- /dev/null
+++ b/shigapass/1.5.0/Dockerfile
@@ -0,0 +1,75 @@
+FROM ubuntu:jammy as app
+
+# List all software versions as ARGs near the top of the dockerfile
+ARG SHIGAPASS_VER=1.5.0
+ARG BLAST_VER=2.12.0
+
+# 'LABEL' instructions tag the image with metadata that might be important to the user
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="ShigaPass"
+LABEL software.version="${SHIGAPASS_VER}"
+LABEL description="In silico tool used to predict Shigella serotypes and to differentiate between Shigella, EIEC (Enteroinvasive E. coli), and non Shigella/EIEC using assembled whole genomes."
+LABEL website="https://github.com/imanyass/ShigaPass/"
+LABEL license="https://github.com/imanyass/ShigaPass/blob/main/LICENSE"
+LABEL maintainer="Jill Hagey"
+LABEL maintainer.email="qpk9@cdc.gov"
+
+# 'RUN' executes code during the build
+# Install dependencies via apt-get or yum if using a centos or fedora base
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ git \
+ libgomp1 && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# install ncbi-blast+ 2.12.0 pre-compiled linux binaries
+ARG BLAST_VER=2.12.0
+
+#creating variable for referencing database
+ENV DB_PATH=/ShigaPass-${SHIGAPASS_VER}/SCRIPT/ShigaPass_DataBases/
+
+RUN wget ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/${BLAST_VER}/ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ tar -xzf ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz && \
+ rm ncbi-blast-${BLAST_VER}+-x64-linux.tar.gz
+
+# install Shigapass
+RUN wget https://github.com/imanyass/ShigaPass/archive/refs/tags/v${SHIGAPASS_VER}.tar.gz && \
+ tar -xzf v${SHIGAPASS_VER}.tar.gz && \
+ rm -r v${SHIGAPASS_VER}.tar.gz && \
+ chmod +x /ShigaPass-${SHIGAPASS_VER}/SCRIPT/ShigaPass.sh && \
+ chmod -R a+rw ${DB_PATH} && \
+ mkdir /data
+
+# 'ENV' instructions set environment variables that persist from the build into the resulting image
+# Use for e.g. $PATH and locale settings for compatibility with Singularity
+ENV PATH="/ncbi-blast-${BLAST_VER}+/bin/:/ShigaPass-${SHIGAPASS_VER}/SCRIPT:$PATH" \
+ LC_ALL=C
+
+#creating variable for referencing database
+ENV DB_PATH=/ShigaPass-${SHIGAPASS_VER}/SCRIPT/ShigaPass_DataBases/
+
+# running test to index the database
+RUN gunzip /ShigaPass-${SHIGAPASS_VER}/Example/Input/*.gz && \
+sed -i "s/^/\/ShigaPass-${SHIGAPASS_VER}\//" /ShigaPass-${SHIGAPASS_VER}/Example/Input/ShigaPass_test.txt && \
+ShigaPass.sh -l /ShigaPass-${SHIGAPASS_VER}/Example/Input/ShigaPass_test.txt -o ShigaPass_Results -p ${DB_PATH} -u
+
+# 'CMD' instructions set a default command when the container is run. This is typically 'tool --help.'
+CMD [ "ShigaPass.sh" ]
+
+# 'WORKDIR' sets working directory
+WORKDIR /data
+
+# A second FROM instruction creates a new stage
+FROM app as test
+
+# set working directory so that all test inputs & outputs are kept in /test
+WORKDIR /test
+
+## print help and version info to ensure ShigaPass is in path and is executable
+RUN ShigaPass.sh -h && \
+ ShigaPass.sh -v
+
+# Testing a script - need to unzip the test files and correct the path for the container
+RUN ShigaPass.sh -l /ShigaPass-${SHIGAPASS_VER}/Example/Input/ShigaPass_test.txt -o ShigaPass_Results -p ${DB_PATH}
\ No newline at end of file
diff --git a/shigapass/1.5.0/README.md b/shigapass/1.5.0/README.md
new file mode 100644
index 000000000..deadd3b45
--- /dev/null
+++ b/shigapass/1.5.0/README.md
@@ -0,0 +1,42 @@
+# ShigaPass container
+
+Main tool: [ShigaPass](https://github.com/imanyass/ShigaPass)
+
+Code repository: https://github.com/imanyass/ShigaPass
+
+Additional tools:
+
+- ncbi-blast+ 2.12.0
+
+Basic information on how to use this tool:
+
+````
+###### This tool is used to predict Shigella serotypes #####
+ Usage : ShigaPass.sh [options]
+
+ options :
+ -l List of input file(s) (FASTA) with their path(s) (mandatory)
+ -o Output directory (mandatory)
+ -p Path to databases directory (mandatory)
+ -t Number of threads (optional, default: 2)
+ -u Call the makeblastdb utility for databases initialisation (optional, but required when running the script for the first time)
+ -k Do not remove subdirectories (optional)
+ -v Display the version and exit
+ -h Display this help and exit
+ Example: ShigaPass.sh -l list_of_fasta.txt -o ShigaPass_Results -p ShigaPass/ShigaPass_DataBases -t 4 -u -k
+ Please note that the -u option should be used when running the script for the first time and after databases updates
+````
+
+> ShigaPass is a new in silico tool used to predict Shigella serotypes and to differentiate between Shigella, EIEC (Enteroinvasive E. coli), and non Shigella/EIEC using assembled whole genomes.
+
+Additional information:
+
+Full documentation: https://github.com/imanyass/ShigaPass
+
+Included Database: Found at `/ShigaPass-${version}/SCRIPT/ShigaPass_DataBases/` so for v1.5.0 use `-p /ShigaPass-1.5.0/SCRIPT/ShigaPass_DataBases/`. This database has already been indexed so there is no need to pass `-u` with your command. If you do, a permissions error will occur.
+
+## Example Usage
+
+```bash
+ShigaPass.sh -l ShigaPass_test.txt -o ShigaPass_Results -p /ShigaPass-1.5.0/SCRIPT/ShigaPass_DataBases/
+```
diff --git a/skani/0.2.2/Dockerfile b/skani/0.2.2/Dockerfile
new file mode 100644
index 000000000..1a4ceb37e
--- /dev/null
+++ b/skani/0.2.2/Dockerfile
@@ -0,0 +1,47 @@
+ARG SKANI_VER="0.2.2"
+
+## Builder ##
+FROM rust:1.80.1 AS builder
+
+ARG SKANI_VER
+
+RUN wget https://github.com/bluenote-1577/skani/archive/refs/tags/v${SKANI_VER}.tar.gz &&\
+ tar -xvf v${SKANI_VER}.tar.gz &&\
+ cd skani-${SKANI_VER} &&\
+ cargo install --path . --root ~/.cargo &&\
+ chmod +x /root/.cargo/bin/skani
+
+## App ##
+FROM ubuntu:jammy AS app
+
+ARG SKANI_VER
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="skani"
+LABEL software.version=${SKANI_VER}
+LABEL description="skani is a program for calculating average nucleotide identity (ANI) from DNA sequences (contigs/MAGs/genomes) for ANI > ~80%."
+LABEL website="https://github.com/bluenote-1577/skani"
+LABEL license="https://github.com/bluenote-1577/skani/blob/main/LICENSE"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+# copy app from builder stage
+COPY --from=builder /root/.cargo/bin/skani /usr/local/bin/skani
+
+# default run command
+CMD ["skani", "-h"]
+
+# singularity compatibility
+ENV LC_ALL=C
+
+WORKDIR /data
+
+## Test ##
+FROM app AS test
+
+RUN apt-get update && apt-get install -y wget &&\
+ wget https://github.com/bluenote-1577/skani/raw/v0.2.0/refs/e.coli-EC590.fasta &&\
+ wget https://github.com/bluenote-1577/skani/raw/v0.2.0/refs/e.coli-K12.fasta
+
+RUN skani dist e.coli-EC590.fasta e.coli-K12.fasta
\ No newline at end of file
diff --git a/skani/0.2.2/README.md b/skani/0.2.2/README.md
new file mode 100644
index 000000000..f51996225
--- /dev/null
+++ b/skani/0.2.2/README.md
@@ -0,0 +1,33 @@
+# skani container
+
+Main tool: [skani](https://github.com/bluenote-1577/skani)
+
+Code repository: https://github.com/bluenote-1577/skani
+
+Basic information on how to use this tool:
+- executable: skani
+- help: -h, --help
+- version: -V, --version
+- description: skani is a program for calculating average nucleotide identity (ANI) from DNA sequences (contigs/MAGs/genomes) for ANI > ~80%.
+
+Additional information:
+
+This container does not contain any database or reference genome.
+
+Full documentation: https://github.com/bluenote-1577/skani/wiki
+
+## Example Usage
+
+Quick ANI calculation:
+```bash
+skani dist genome1.fa genome2.fa
+```
+Memory-efficient database search:
+```bash
+skani sketch genomes/* -o database
+skani search -d database query1.fa query2.fa ...
+```
+All-to-all comparison:
+```bash
+skani triangle genomes/*
+```
diff --git a/stxtyper/1.0.24/Dockerfile b/stxtyper/1.0.24/Dockerfile
new file mode 100644
index 000000000..2950769ad
--- /dev/null
+++ b/stxtyper/1.0.24/Dockerfile
@@ -0,0 +1,55 @@
+FROM ubuntu:jammy AS app
+
+ARG STXTYPER_VER="1.0.24"
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="stxtyper"
+LABEL software.version="${STXTYPER_VER}"
+LABEL description="Search for Shiga toxin genes within bacterial genome assemblies"
+LABEL website="https://github.com/ncbi/stxtyper"
+LABEL license="https://github.com/ncbi/stxtyper/blob/main/LICENSE"
+LABEL maintainer="Curtis Kapsak"
+LABEL maintainer.email="kapsakcj@gmail.com"
+
+# install dependencies via apt; cleanup apt garbage
+# blast from ubuntu:jammy is v2.12.0 (as of 2024-09-06)
+# procps is for ps command (required for nextflow)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ wget \
+ ca-certificates \
+ ncbi-blast+ \
+ procps && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+# download pre-compiled binary
+RUN mkdir /stxtyper && \
+wget -q https://github.com/ncbi/stxtyper/releases/download/v${STXTYPER_VER}/stxtyper_v${STXTYPER_VER}_x86_64_Linux.tar.gz && \
+tar -xzf stxtyper_v${STXTYPER_VER}_x86_64_Linux.tar.gz --strip-components=1 -C /stxtyper && \
+rm -r stxtyper_v${STXTYPER_VER}_x86_64_Linux.tar.gz && \
+ls -lh /stxtyper
+
+# set PATH to include where stxtyper & fasta_check executables exist
+ENV PATH="${PATH}:/stxtyper" \
+TERM=xterm-256color
+
+# set final working directory as /data for passing data in/out of container
+WORKDIR /data
+
+FROM app AS test
+
+# print version and help options & run the supplied tests
+RUN tblastn -version && \
+stxtyper --version && \
+stxtyper --help && \
+cd /stxtyper && \
+bash test_stxtyper.sh
+
+# download genome from NCBI and test stxtyper with it
+# expect 1 perfect match to stx2o subtype, with COMPLETE operon designation
+# https://www.ncbi.nlm.nih.gov/datasets/genome/GCA_012224845.2/
+# grep below requires both strings to be present in the same line
+RUN echo "downloading test genome & running through stxtyper..." && \
+wget -q https://ftp.ncbi.nlm.nih.gov/genomes/all/GCA/012/224/845/GCA_012224845.2_ASM1222484v2/GCA_012224845.2_ASM1222484v2_genomic.fna.gz && \
+stxtyper -n GCA_012224845.2_ASM1222484v2_genomic.fna.gz | tee test-result.tsv && \
+grep 'stx2o' test-result.tsv | grep 'COMPLETE'
diff --git a/stxtyper/1.0.24/README.md b/stxtyper/1.0.24/README.md
new file mode 100644
index 000000000..6871353a8
--- /dev/null
+++ b/stxtyper/1.0.24/README.md
@@ -0,0 +1,29 @@
+# stxtyper container
+
+Main tool: [stxtyper](https://github.com/ncbi/stxtyper)
+
+Additional tools:
+
+- ncbi-blast+ 2.12.0
+
+Basic information on how to use this tool:
+
+- executable: `stxtyper`
+- help: `stxtyper --help`
+- version: `stxtyper --version`
+- description: Detects and types Shiga toxin genes in nucleotide sequences
+
+Full documentation: [https://github.com/ncbi/stxtyper](https://github.com/ncbi/stxtyper)
+
+Note: This software will soon be incorporated into NCBI's AMRFinderPlus software. Stxtyper will run under-the-hood of AMRFinderPlus when using the `amrfinder --organism Escherichia` option.
+
+## Example Usage
+
+```bash
+# stxtyper accepts both gzipped and uncompressed FASTA files
+stxtyper -n GCA_012224845.2_ASM1222484v2_genomic.fna.gz -o stxtyper-results.tsv
+
+$ column -t test-result.tsv
+#target_contig stx_type operon identity target_start target_stop target_strand A_reference A_reference_subtype A_identity A_coverage B_reference B_reference_subtype B_identity B_coverage
+CP113091.1 stx2o COMPLETE 100.00 2085533 2086768 + WAK52085.1 stxA2o 100.00 100.00 QZL10983.1 stxB2o 100.00 100.00
+```
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 1 - Lecture.pptx.pdf b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 1 - Lecture.pptx.pdf
new file mode 100644
index 000000000..7b67b54d1
Binary files /dev/null and b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 1 - Lecture.pptx.pdf differ
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 2 - Lecture.pptx.pdf b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 2 - Lecture.pptx.pdf
new file mode 100644
index 000000000..f4079abd3
Binary files /dev/null and b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 2 - Lecture.pptx.pdf differ
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 3 - Lecture.pptx.pdf b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 3 - Lecture.pptx.pdf
new file mode 100644
index 000000000..e71ae33bb
Binary files /dev/null and b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 3 - Lecture.pptx.pdf differ
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 4 - Lecture.pptx.pdf b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 4 - Lecture.pptx.pdf
new file mode 100644
index 000000000..b51903e59
Binary files /dev/null and b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Intro to Docker for PH Bioinfo Week 4 - Lecture.pptx.pdf differ
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/README.md b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/README.md
new file mode 100644
index 000000000..1eac4116b
--- /dev/null
+++ b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/README.md
@@ -0,0 +1,468 @@
+# Intro To Docker for Public Health Bioinformatics Training
+
+## Table of Contents
+
+- [Prerequisites](#prerequisites)
+- [Docker basics](#docker-basics)
+ - [Downloading docker images](#downloading-docker-images)
+ - [Docker images vs containers](#docker-images-vs-containers)
+ - [Running docker images](#running-docker-images)
+ - [File permissions \& passing files in and out of containers](#file-permissions--passing-files-in-and-out-of-containers)
+ - [File permissions](#file-permissions)
+ - [Passing files in and out of containers](#passing-files-in-and-out-of-containers)
+- [Week 1 Exercise 1](#week-1-exercise-1)
+- [Week 1 Exercise 2](#week-1-exercise-2)
+- [Week 2 Exercises](#week-2-exercises)
+ - [NCBI `datasets`](#ncbi-datasets)
+ - [SPAdes](#spades)
+ - [Experiment](#experiment)
+- [Week 3 Exercises](#week-3-exercises)
+- [Resources](#resources)
+
+## Prerequisites
+
+- GitHub Account
+ - If you don't already have a GitHub account, please go to github.com and register (it's free!)
+ - Once you have registered ensure you are signed into your account on github.com
+- Navigate to GitPod & start a Workspace:
+ - https://gitpod.io/
+ - Select "Continue with GitHub", authorize Gitpod to access your GitHub account, and sign in with your credentials
+ - Start New Workspace
+ - Set the "Context URL": `https://github.com/theiagen/docker-builds`
+ - Select the default Editor and default Class, click "Continue"
+- :rotating_light: Once you're finished working, make sure to shut down your workspace to save your allotted free hours in GitPod :rotating_light:. There are a few ways to stop your workspace from running:
+  - In the GitPod environment (VSCode interface), click on the orange Gitpod logo in the bottom left corner. Select "Stop Workspace"
+ - OR you can navigate to https://gitpod.io/, find your workspace in the list, click on the three-dot button, and select "Stop"
+
+## Docker basics
+
+Check to ensure docker is installed, look at help options for `docker` and `docker pull`. Run the following commands in the Terminal:
+
+```
+docker --version
+docker --help
+command -v docker
+docker pull --help
+```
+
+The `hello world` exercise for docker! What happens when you run this command?
+
+```bash
+docker run hello-world
+```
+
+`docker images` = command used to list all docker images available on your computer. What docker images are available on your computer now?
+
+```bash
+docker images
+```
+
+### Downloading docker images
+
+Let's download another docker image to practice with.
+
+We're going to download a docker image from this docker hub repository: [https://hub.docker.com/r/staphb/ncbi-datasets](https://hub.docker.com/r/staphb/ncbi-datasets). Run the following command to download the StaPH-B docker image that contains the NCBI `datasets` command-line tool. [More info on the command line tool `datasets` can be found here](https://www.ncbi.nlm.nih.gov/datasets/docs/v2/getting_started/)
+
+```bash
+docker pull staphb/ncbi-datasets:14.20.0
+```
+
+Now run the command `docker images` - what has changed?
+
+QUIZ: What command would I use to download the StaPH-B docker image for the `mlst` software? (hint: see https://hub.docker.com/r/staphb/mlst/tags)
+
+
+ Answer can be found here. Click to show answer:
+
+ ```bash
+ docker pull staphb/mlst:latest
+ ```
+
+
+
+### Docker images vs containers
+
+When you ran the command `docker run hello-world` earlier, docker started a container using the docker image called `hello-world`, ran some things, and then the container automatically exited. The container did not delete itself, and we can see that by running the command `docker ps --all`. Run the command:
+
+```
+docker ps --all
+```
+
+You can tell `docker` to automatically delete the container after exiting with the `docker run --rm` flag. Let's try it out. Run the following command which will download and run a command in a new docker container:
+
+```
+docker run --rm ubuntu:focal echo "hello from inside the container!"
+```
+
+Now, run `docker ps --all` - do we see a container listed for the command we just ran? We should **not see it listed**, as long as the `docker run --rm` flag was included.
+
+Now try the same command without the `--rm` flag:
+
+```bash
+docker run ubuntu:focal echo "hello from inside the container!"
+
+# look at the list of containers again:
+docker ps --all
+```
+
+### Running docker images
+
+Interactive mode: You can launch into a container interactively, and be given a pseudo-shell from which you can run commands. You will need to use the `docker run -it` flags to do this:
+
+```bash
+# your command prompt will change after launching interactive mode
+$ docker run -it staphb/ncbi-datasets:14.20.0
+Unable to find image 'staphb/ncbi-datasets:14.20.0' locally
+14.20.0: Pulling from staphb/ncbi-datasets
+2ab09b027e7f: Already exists
+d6391dffcf79: Pull complete
+4f4fb700ef54: Pull complete
+eaa7a46260b5: Pull complete
+05a7178bde59: Pull complete
+Digest: sha256:4441c4556698fce13a2612889d467435eb8cec096565233da0e7b74fbae4a3fb
+Status: Downloaded newer image for staphb/ncbi-datasets:14.20.0
+root@d1c3f33f5fc9:/data
+
+# now run the following commands:
+pwd
+ls
+cd /
+ls
+
+# you can exit the interactive container with the 'exit' command
+exit
+```
+
+Non-interactive mode: The default method of running a container does not launch an interactive terminal, but rather, a command or set of commands is passed into the container. The format is as follows:
+
+```bash
+# format
+docker run <image>:<tag> <command>
+
+# example
+docker run ubuntu:focal echo "hello!"
+```
+
+### File permissions & passing files in and out of containers
+
+#### File permissions
+
+By default, when you launch a container with `docker run`, any commands run will be run as the `root` linux user. Thus, any files created will be owned by the `root` user, limiting the downstream use of these files by non-`root` linux users. Non-`root` users may encounter file permission errors if attempting to delete or edit files owned by `root`.
+
+To avoid these issues, you can pass in your linux user ID (UID) and group ID (GID) so that the container runs with your user and group IDs. The flag to pass in user and group IDs is `docker run --user <UID>:<GID>` or `docker run -u <UID>:<GID>` for short
+
+To test let's run the StaPH-B NCBI `datasets` docker container and create some files. Run the following commands:
+
+```bash
+# change directories so that we have a relatively clean working environment
+cd training/
+
+# launch the container in interactive mode
+docker run -it staphb/ncbi-datasets:14.20.0
+
+# create a file called "test.txt"
+touch test.txt
+
+# look at the permissions for this file
+ls -lh
+
+# check which user you're currently logged in as
+whoami
+
+# exit the container with the command 'exit'
+exit
+```
+
+Now let's launch the container in interactive mode, but pass in our user and group id. Instead of looking up your user and group IDs, you can look up these IDs on-the-fly with `id -u` and `id -g`
+
+```bash
+# launch the container in interactive mode again, but this time pass in our user and group IDs
+docker run -u $(id -u):$(id -g) -it staphb/ncbi-datasets:14.20.0
+
+# change to /tmp
+cd /tmp
+
+# create a file called "test.txt"
+touch test.txt
+
+# look at the permissions for this file
+ls -lh
+
+# check which user you're currently logged in as
+whoami
+
+# exit the container with the command 'exit'. Your command prompt should go back to its default
+exit
+```
+
+#### Passing files in and out of containers
+
+Docker containers have filesystems that are completely separate and isolated from your host computer's filesystem. You can mount a "volume" between your host computer's filesystem and the container filesystem to pass files in and out.
+
+Files created inside of containers are not automatically exported from the container so we must use the `--volume <host-path>:<container-path>` or `-v <host-path>:<container-path>` (shorthand) flag. You must tell `docker` which path to mount from your host system, and the path on the container's filesystem to mount the volume to. It's OK if the path inside the container does not exist, it will be created.
+
+If my present working directory (`$PWD`) is `/home/curtis_kapsak`, then I can mount my $PWD to `/data` in the container's filesystem like so:
+
+```
+docker run --volume $PWD:/data ubuntu:focal
+```
+
+Let's pass a file into a container. First make a test text file, and then pass it into the container:
+
+```bash
+# create the text file which contains the text "this is a test"
+echo "this is a test">text.txt
+
+# mount your PWD to /data inside the container and list the contents of /data in the container filesystem
+docker run --volume $PWD:/data ubuntu:focal ls /data
+
+# same as before, but cat the text.txt file from inside the container
+docker run --volume $PWD:/data ubuntu:focal cat /data/text.txt
+```
+
+Now let's create a file inside the docker container, and then save it to our host computer for usage later:
+
+```bash
+# create a file inside the container using the "touch" command, writing to /data in the container
+docker run --volume $PWD:/data ubuntu:focal touch /data/file-made-inside-container.txt
+
+# let's check and make sure that new file exists on our host computer filesystem:
+ls
+
+# what happens when we don't mount a volume and create a file inside the container?
+docker run ubuntu:focal touch file-made-inside-container-NO-VOLUME.txt
+
+# is this new file present on our host filesystem? Run this command to check:
+ls file-made-inside-container-NO-VOLUME.txt
+```
+
+## Week 1 Exercise 1
+
+Now that we've covered some of the basics of `docker` on the command line, let's download a *Klebsiella pneumoniae* genome FASTA file from NCBI.
+
+End goal: Use NCBI `datasets` to download a genome FASTA file - Klebsiella pneumoniae
+
+We downloaded the docker image called `staphb/ncbi-datasets:14.20.0` earlier, so now let's use it to download the FASTA file.
+
+We're going to download this genome: https://www.ncbi.nlm.nih.gov/data-hub/genome/GCF_000240185.1/
+
+Let's launch the `datasets` docker image interactively, and use the flags you learned about earlier:
+
+```bash
+### launch the container in interactive mode ###
+# --rm is to remove the container after it exits (i.e. delete the container)
+# -v is for mounting my filesystem to the container filesystem
+# -u is for passing in my user ID and group ID
+# -it is 2 flags for launching interactive mode
+docker run --rm -v $PWD:/data -u $(id -u):$(id -g) -it staphb/ncbi-datasets:14.20.0
+
+# run the datasets tool to download the FASTA file (along with some other metadata)
+datasets download genome accession GCF_000240185.1
+
+# unzip the .zip archive
+unzip ncbi_dataset.zip
+
+# exit the container
+exit
+
+# take a peek at the top of the FASTA file
+head -n 4 ncbi_dataset/data/GCF_000240185.1/GCF_000240185.1_ASM24018v2_genomic.fna
+```
+
+## Week 1 Exercise 2
+
+Goal: Run `kleborate` on FASTA file for subtyping, serotyping, virulence and AMR prediction
+
+Now that we have downloaded our FASTA file, let's launch into the container in interactive mode, and run `kleborate`
+
+```bash
+# download the docker image "staphb/kleborate:2.3.2-2023-05" and launch interactive mode
+docker run --rm -v $PWD:/data -u $(id -u):$(id -g) -it staphb/kleborate:2.3.2-2023-05
+
+# run kleborate using the FASTA file as input (be patient, this step may take a few minutes to run....)
+kleborate --all -o results.tsv -a ncbi_dataset/data/GCF_000240185.1/GCF_000240185.1_ASM24018v2_genomic.fna
+
+# exit the container
+exit
+
+# view results of Kleborate
+cat results.tsv
+```
+
+## Week 2 Exercises
+
+Let's build some docker images! In today's exercise we are going to practice building docker images in various ways. Let's tweak the dockerfiles and see how those changes impact the final docker image.
+
+### NCBI `datasets`
+
+Let's build the most recent version of NCBI `datasets` using the dockerfile located here: `ncbi-datasets/14.20.0/Dockerfile`
+
+Reminder - the basic `docker build` command structure:
+
+```bash
+docker build --tag <name>:<tag> <path-to-directory-containing-Dockerfile>
+```
+
+Now with real values filled in, run the following command:
+
+```bash
+docker build --tag ncbi-datasets:14.20.0 ncbi-datasets/14.20.0/
+```
+
+What happened when we ran this command?
+
+- Docker daemon read & interpreted the dockerfile. It also ensured correct syntax & format.
+- The base image `ubuntu:jammy` was downloaded if not already present on computer.
+- Daemon began running subsequent Dockerfile instructions (`RUN`, `WORKDIR`, etc) in order through to the end of the dockerfile.
+- Docker image was given a sha256 hash & we received a message saying docker image was named & built successfully.
+
+When we ran the previous command, it built all stages of the dockerfile. BUT we can tell `docker` to build to a specific stage using `docker build --target <stage-name>`. Let's try building to the `test` stage specifically:
+
+```bash
+# note the new option '--target test'
+docker build --target test --tag ncbi-datasets:14.20.0-test-stage ncbi-datasets/14.20.0/
+```
+
+That command finished running almost instantaneously, why?
+
+
+ Answer can be found here. Click to show answer:
+
+**Answer: The image has already been built previously (with last `docker build` command we ran) and thus the layers are "cached" or stored locally to be re-used. No need to spend time & resources building an image when it has already been built!**
+
+
+
+Now let's build to only the `app` stage, as this will be the final docker image that we share via dockerhub, quay, whatever container registry. This means we will use `--target app` which will skip building the layers in the `test` stage:
+
+```bash
+# note the new option '--target app'
+docker build --target app --tag ncbi-datasets:14.20.0-app-stage ncbi-datasets/14.20.0/
+```
+
+### SPAdes
+
+The SPAdes dockerfile is a bit more complex as its test stage runs the SPAdes software on a toy dataset and assembles a plasmid sequence.
+
+Let's build the most recent version of SPAdes, but this time start with only building the `app` stage. We are going to use the dockerfile located at `spades/3.15.5/Dockerfile`:
+
+```bash
+# note the new option '--target app'
+docker build --target app --tag spades:3.15.5-app-stage spades/3.15.5/
+```
+
+Now let's build all the way through the `test` stage and see the test assembly process run:
+
+```bash
+# note the new option '--target test'
+docker build --target test --tag spades:3.15.5-test-stage spades/3.15.5/
+```
+
+### Experiment
+
+Let's try changing a few things in the dockerfile to see the effect of the changes.
+
+_Hypothetical (& false) scenario 1 - SPAdes produced a `.zip` output file and my bioinformatics pipeline needs to `unzip` this file to extract the contents. I have to install the `unzip` software in the SPAdes docker image to accomplish this_
+
+Steps:
+
+1. Open up the SPAdes dockerfile in the GitPod/VSCode editor
+2. Navigate to the top of dockerfile and look for the `apt-get install` step on lines 18-22.
+3. Add a return and `unzip \` after line 21. The entire `RUN` layer should look like this:
+```Dockerfile
+RUN apt-get update && apt-get install --no-install-recommends -y python3 \
+ python3-distutils \
+ wget \
+ pigz \
+ unzip \
+ ca-certificates && \
+ apt-get autoclean && rm -rf /var/lib/apt/lists/* && \
+ update-alternatives --install /usr/bin/python python /usr/bin/python3 10
+```
+
+4. Once added, save the file (CTRL + S) and let's rebuild the image with a new tag (`spades:3.15.5-test-stage-unzip-added`):
+
+```bash
+# same command as before, but now with an updated dockerfile
+docker build --target test --tag spades:3.15.5-test-stage-unzip-added spades/3.15.5/
+```
+
+5. Launch an interactive container to see if `unzip` is actually installed:
+
+```bash
+# launch into interactive mode in container
+docker run -ti spades:3.15.5-test-stage-unzip-added
+
+# pull up unzip help options
+unzip --help
+```
+
+_Hypothetical scenario 2 - I don't think the SPAdes test (`spades.py --test`) is good enough, I want to make sure it works on some real data. Let's try using SPAdes to assemble some real bacterial WGS data_
+
+The current test is sufficient, but let's add another layer to the dockerfile to bolster the `test` stage. We are going to add some lines to download some real bacterial WGS data and assemble in the test stage. We are going to download some E. coli WGS data, Illumina paired-end, and assemble the genome with SPAdes. Here's the dataset on ENA: [https://www.ebi.ac.uk/ena/browser/view/SRR6903006](https://www.ebi.ac.uk/ena/browser/view/SRR6903006)
+
+Steps:
+
+1. Open up the SPAdes dockerfile in the GitPod/VSCode editor
+2. Navigate to bottom of dockerfile and add these lines of code:
+
+```Dockerfile
+# download test FASTQ files from ENA
+# run SPAdes on FASTQ files
+RUN wget ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR690/006/SRR6903006/SRR6903006_1.fastq.gz && \
+wget ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR690/006/SRR6903006/SRR6903006_2.fastq.gz && \
+spades.py -t 4 --isolate --only-assembler -1 SRR6903006_1.fastq.gz -2 SRR6903006_2.fastq.gz -o spades-output
+```
+
+3. Once added, save the file (CTRL + S) and let's rebuild the image:
+
+```bash
+# same command as before, but now with an updated dockerfile
+docker build --target test --tag spades:3.15.5-test-stage-added-test spades/3.15.5/
+```
+
+Adding this test will ensure the robustness of the docker image, but the tradeoff is that it takes longer to run the test. The assembly process can take a while (5-15 min or longer), especially if the input dataset is large.
+
+## Week 3 Exercises
+
+Currently in this repository, the latest available version of NCBI `datasets` is 14.20.0 (located at `ncbi-datasets/14.20.0/Dockerfile`). As of today 2023-06-01, more versions have been released, up to 15.2.0 and the repository is out of date. Let's change that and create another dockerfile for version 15.2.0.
+
+Let's navigate to the `datasets` GitHub & documentation to ensure that the installation process is still the same as before. We don't want to be caught by any surprises.
+
+- [https://github.com/ncbi/datasets/releases](https://github.com/ncbi/datasets/releases)
+- [https://www.ncbi.nlm.nih.gov/datasets/docs/v2/download-and-install/](https://www.ncbi.nlm.nih.gov/datasets/docs/v2/download-and-install/)
+
+:warning: **Spoiler alert!** Installation has not changed from previous versions. We will continue using the current installation method from the 14.20.0 dockerfile. This installation method downloads a pre-compiled binary (AKA executable file) that requires little-to-no setup.
+
+Steps:
+
+1. In the file explorer on the left-hand side, right-click on the directory `ncbi-datasets/14.20.0/` and select Copy.
+2. Create a copy by pressing CTRL + V to paste the directory.
+3. Rename the directory (F2 or right-click on copied directory and select rename) to the new version `15.2.0`.
+4. Open the Dockerfile located at `ncbi-datasets/15.2.0/Dockerfile` in the editor.
+5. On line 3, replace the old version `14.20.0` with the new version `15.2.0`. Do not change anything else!
+6. Save the file by pressing CTRL + S, or selecting File :arrow_right: Save
+7. Open a terminal if one isn't already open by navigating to the hamburger menu in the top left, select Terminal, and then select New Terminal.
+8. Build your new docker image by using `docker build` in the terminal. You can use the following command:
+
+```bash
+docker build -t ncbi-datasets:15.2.0 ncbi-datasets/15.2.0/
+```
+
+9. Verify that the new version has been installed by running the `datasets --version` command inside the container:
+
+```bash
+# non-interactive mode:
+docker run ncbi-datasets:15.2.0 datasets --version
+
+# interactive mode:
+docker run -it ncbi-datasets:15.2.0
+
+# once interactive container is launched, run the same command:
+datasets --version
+```
+
+## Resources
+
+- You can find all of StaPH-B's dockerfiles & documentation here: https://github.com/StaPH-B/docker-builds
+- StaPH-B's docker images on dockerhub here: https://hub.docker.com/u/staphb
+- StaPH-B's docker image on quay.io here: https://quay.io/organization/staphb
diff --git a/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Week 3 Exercise - Contribute Dockerfile to StaPH-B.exported2023-06-05.pptx.pdf b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Week 3 Exercise - Contribute Dockerfile to StaPH-B.exported2023-06-05.pptx.pdf
new file mode 100644
index 000000000..96d85a1c0
Binary files /dev/null and b/training/NE-BRR-docker-for-PH-bioinformatics-May2023/Week 3 Exercise - Contribute Dockerfile to StaPH-B.exported2023-06-05.pptx.pdf differ
diff --git a/verkko/2.2/Dockerfile b/verkko/2.2/Dockerfile
new file mode 100644
index 000000000..280bf0910
--- /dev/null
+++ b/verkko/2.2/Dockerfile
@@ -0,0 +1,48 @@
+FROM mambaorg/micromamba:1.5.9 AS app
+
+ARG VERKKO_VER="2.2"
+
+USER root
+
+WORKDIR /
+
+LABEL base.image="mambaorg/micromamba:1.5.9"
+LABEL dockerfile.version="1"
+LABEL software="Verkko"
+LABEL software.version="${VERKKO_VER}"
+LABEL description="Verkko is a hybrid genome assembly pipeline developed for telomere-to-telomere assembly of PacBio HiFi or Oxford Nanopore Duplex and Oxford Nanopore simplex reads."
+LABEL website="https://github.com/marbl/verkko"
+LABEL license="https://github.com/marbl/verkko/blob/master/README.licenses"
+LABEL maintainer="Kutluhan Incekara"
+LABEL maintainer.email="kutluhan.incekara@ct.gov"
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ procps &&\
+ apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+RUN micromamba install --name base -c conda-forge -c bioconda verkko=${VERKKO_VER} && \
+ micromamba clean -afy
+
+ENV PATH="/opt/conda/bin/:${PATH}" \
+ LC_ALL=C.UTF-8
+
+CMD [ "verkko", "--help" ]
+
+WORKDIR /data
+
+## Test ##
+FROM app AS test
+
+RUN verkko --help
+
+RUN apt-get update && apt-get install --no-install-recommends -y curl
+
+# test data
+RUN curl -L https://obj.umiacs.umd.edu/sergek/shared/ecoli_hifi_subset24x.fastq.gz -o hifi.fastq.gz &&\
+ curl -L https://obj.umiacs.umd.edu/sergek/shared/ecoli_ont_subset50x.fastq.gz -o ont.fastq.gz
+
+# verkko test run
+RUN verkko -d asm --hifi ./hifi.fastq.gz --nano ./ont.fastq.gz &&\
+ head asm/assembly.hifi-coverage.csv &&\
+ head -c 1000 asm/assembly.fasta
+
diff --git a/verkko/2.2/README.md b/verkko/2.2/README.md
new file mode 100644
index 000000000..ee3b2e499
--- /dev/null
+++ b/verkko/2.2/README.md
@@ -0,0 +1,21 @@
+# Verkko container
+
+Main tool: [verkko](https://github.com/marbl/verkko)
+
+Code repository: https://github.com/marbl/verkko
+
+Basic information on how to use this tool:
+- executable: verkko
+- help: --help
+- version: --version
+- description: Verkko is a hybrid genome assembly pipeline developed for telomere-to-telomere assembly of PacBio HiFi or Oxford Nanopore Duplex and Oxford Nanopore simplex reads.
+
+Full documentation: https://github.com/marbl/verkko
+
+## Example Usage
+
+```bash
+verkko -d <output_directory> --hifi <hifi_reads.fastq.gz> [--nano <ont_reads.fastq.gz>]
+```
+
+
\ No newline at end of file
diff --git a/vigor4/4.1.20200702/Dockerfile b/vigor4/4.1.20200702/Dockerfile
new file mode 100644
index 000000000..cee77e354
--- /dev/null
+++ b/vigor4/4.1.20200702/Dockerfile
@@ -0,0 +1,84 @@
+FROM ubuntu:jammy as app
+
+ARG VIGOR4_VER='4.1.20200702'
+ARG VIGOR4_COMMIT='23852472af871b6c05bd5abf2022d140b9cecd3b'
+ARG VIGOR4_DB_COMMIT='390582955049b9ddc989510672fbde90cf3387c7'
+
+LABEL base.image="ubuntu:jammy"
+LABEL container.version="1"
+LABEL dockerfile.version="1"
+LABEL software="VIGOR4"
+LABEL software.version="${VIGOR4_VER}"
+LABEL description="VIGOR - Viral Genome ORF Reader"
+LABEL website="https://github.com/JCVenterInstitute/VIGOR4"
+LABEL license="https://github.com/JCVenterInstitute/VIGOR4/blob/master/LICENSE.txt"
+LABEL maintainer="Erin Young"
+LABEL maintainer.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get -y upgrade && apt-get -y --no-install-recommends install \
+ exonerate \
+ maven \
+ openjdk-11-jre-headless \
+ unzip \
+ wget && \
+ apt-get autoclean && \
+ rm -rf /var/lib/apt/lists/*
+
+#get VIGOR4
+RUN wget -q https://github.com/JCVenterInstitute/VIGOR4/archive/${VIGOR4_COMMIT}.zip && \
+ unzip ${VIGOR4_COMMIT}.zip && \
+ mv VIGOR4-${VIGOR4_COMMIT} /VIGOR4 && \
+ rm ${VIGOR4_COMMIT}.zip && \
+ cd /VIGOR4 && \
+ mvn -DskipTests clean package && \
+ unzip /VIGOR4/target/vigor-${VIGOR4_VER}.zip && \
+ rm /VIGOR4/target/vigor-${VIGOR4_VER}.zip && \
+ # create temp directory
+ mkdir -p /VIGOR4/tmp/vigor-temp && \
+ # make executable
+ chmod 777 -R /VIGOR4/vigor-${VIGOR4_VER}/bin/vigor4 && \
+ #set paths for refs, exonerate, and tmp directory in the config file
+ cd /VIGOR4/vigor-${VIGOR4_VER}/config && \
+ (echo "\nreference_database_path=/VIGOR_DB/Reference_DBs/\nexonerate_path=/usr/bin/exonerate\ntemporary_directory=/VIGOR4/tmp/vigor-temp\n" > vigor.ini)
+
+#get databases
+RUN wget -q https://github.com/JCVenterInstitute/VIGOR_DB/archive/${VIGOR4_DB_COMMIT}.zip && \
+ unzip ${VIGOR4_DB_COMMIT}.zip && \
+ rm ${VIGOR4_DB_COMMIT}.zip && \
+ mv VIGOR_DB-${VIGOR4_DB_COMMIT} /VIGOR_DB
+
+#set paths
+ENV PATH="/usr/bin/exonerate:/VIGOR_DB/Reference_DBs:/VIGOR4/vigor-${VIGOR4_VER}/bin/:$PATH"\
+ LC_ALL=C
+
+CMD vigor4 -h
+
+RUN mkdir /data
+WORKDIR /data
+
+# testing stage
+FROM app as test
+
+WORKDIR /test
+
+# checking help and version
+RUN vigor4 -h && vigor4 --version
+
+# list databases
+RUN ls /VIGOR_DB/Reference_DBs | grep -v ini
+
+# running on test files
+RUN vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/rtva/rtva.ungapped.fasta -o test_rtva_db -d /VIGOR_DB/Reference_DBs/rtva_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/rtvb/rtvb.ungapped.fasta -o test_rtvb_db -d /VIGOR_DB/Reference_DBs/rtvb_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/rtvc/rtvc.ungapped.fasta -o test_rtvc_db -d /VIGOR_DB/Reference_DBs/rtvc_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/rtvf/rtvf.ungapped.fasta -o test_rtvf_db -d /VIGOR_DB/Reference_DBs/rtvf_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/rtvg/rtvg.ungapped.fasta -o test_rtvg_db -d /VIGOR_DB/Reference_DBs/rtvg_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/sapo/sapo.ungapped.fasta -o test_sapo_db -d /VIGOR_DB/Reference_DBs/sapo_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/wnvI/wnvI.ungapped.fasta -o test_wnvI_db -d /VIGOR_DB/Reference_DBs/wnvI_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/wnvII/wnvII.ungapped.fasta -o test_wnvII_db -d /VIGOR_DB/Reference_DBs/wnvII_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/zikv/zikv.ungapped.fasta -o test_zikv_db -d /VIGOR_DB/Reference_DBs/zikv_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/flua/flua.ungapped.fasta -o test_flua_db -d /VIGOR_DB/Reference_DBs/flua_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/flub/flub.ungapped.fasta -o test_flub_db -d /VIGOR_DB/Reference_DBs/flub_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/fluc/fluc.ungapped.fasta -o test_fluc_db -d /VIGOR_DB/Reference_DBs/fluc_db && \
+ vigor4 -i /VIGOR4/src/test/resources/vigor4ReferenceOutput/veev/veev.ungapped.fasta -o test_veev_db -d /VIGOR_DB/Reference_DBs/veev_db && \
+ head test*.tbl
diff --git a/vigor4/4.1.20200702/README.md b/vigor4/4.1.20200702/README.md
new file mode 100644
index 000000000..74c359388
--- /dev/null
+++ b/vigor4/4.1.20200702/README.md
@@ -0,0 +1,110 @@
+# VIGOR4 container
+
+Main tool: [vigor4](https://github.com/JCVenterInstitute/VIGOR4)
+
+Code repository: https://github.com/JCVenterInstitute/VIGOR4
+
+Additional tools:
+- exonerate: 2.4.0
+
+Basic information on how to use this tool:
+- executable: vigor4
+- help: -h
+- version: --version
+- description: |
+> VIGOR4 (Viral Genome ORF Reader) is a Java application to predict protein sequences encoded in viral genomes.
+> VIGOR4 determines the protein coding sequences by sequence similarity searching against curated viral protein databases.
+
+Additional information:
+
+All databases in VIGOR4_DB have been downloaded to /VIGOR_DB and can be found at /VIGOR_DB/Reference_DBs.
+
+Currently included databases:
+```bash
+antennavirus_txid2560091_db
+bandavirus_txid2733256_db
+beidivirus_txid2501981_db
+cicadellivirus_txid2948664_db
+coguvirus_txid2560118_db
+dengue_GP_mp
+dengue_txid12637_db
+embe_db
+embe_orf1a_mp
+embe_orf1ab_mp
+entovirus_txid2733257_db
+feravirus_txid2501995_db
+flua_db
+flua_ha_mp
+flub_db
+fluc_db
+goukovirus_txid1980420_db
+hantaviridae_db
+hartmanivirus_txid2169607_db
+hibeco_db
+hibeco_orf1a_mp
+hibeco_orf1ab_mp
+horwuvirus_txid2501976_db
+hudivirus_txid2501977_db
+hudovirus_txid2501978_db
+inshuvirus_txid2501996_db
+ixovirus_txid2733258_db
+jonvirus_txid2501997_db
+lassa_db
+laulavirus_txid2560166_db
+lentinuvirus_txid2733259_db
+merbe_db
+merbe_orf1a_mp
+merbe_orf1ab_mp
+mobuvirus_txid2501979_db
+monkeypox_db
+nairoviridae_db
+nobeco_db
+nobeco_orf1a_mp
+nobeco_orf1ab_mp
+orthophasmavirus_txid1980538_db
+peribunyaviridae_db
+phasivirus_txid1980421_db
+phlebovirus_txid11584_db
+pidchovirus_txid2501975_db
+reptarenavirus_txid1653395_db
+rsv_db
+rtva_db
+rtvb_db
+rtvc_db
+rtvf_db
+rtvg_db
+rubodvirus_txid2733260_db
+sapo_db
+sapo_mp
+sarbe_db
+sarbe_orf1a_mp
+sarbe_orf1ab_mp
+sarscov2_db
+sarscov2_orf1a_mp
+sarscov2_orf1ab_mp
+sawastrivirus_txid2560224_db
+tenuivirus_txid12329_db
+uukuvirus_txid2734594_db
+veev_db
+veev_nsp_mp
+veev_sp_mp
+wenrivirus_txid2560254_db
+wnvII_db
+wnvI_db
+wnv_GP1_mp
+wnv_GP2_mp
+wnv_GP3_mp
+wuhivirus_txid2501998_db
+zikv_db
+zikv_mp
+```
+
+VIGOR4_DB is up-to-date with commit [390582955049b9ddc989510672fbde90cf3387c7](https://github.com/JCVenterInstitute/VIGOR_DB/commits/master/) on Dec 1, 2022.
+
+Full documentation: [https://github.com/JCVenterInstitute/VIGOR4](https://github.com/JCVenterInstitute/VIGOR4)
+
+## Example Usage
+
+```bash
+vigor4 -i entovirus.fasta -o entovirus -d /VIGOR_DB/Reference_DBs/entovirus_txid2733257_db
+```
diff --git a/viridian/1.2.2/Dockerfile b/viridian/1.2.2/Dockerfile
new file mode 100644
index 000000000..71738a7bc
--- /dev/null
+++ b/viridian/1.2.2/Dockerfile
@@ -0,0 +1,215 @@
+ARG VIRIDIAN_VER="1.2.2"
+ARG SAMTOOLS_VER="1.20"
+ARG BCFTOOLS_VER=${SAMTOOLS_VER}
+ARG HTSLIB_VER=${SAMTOOLS_VER}
+ARG ENA_VER="1.7.1"
+ARG NGMERGE_VER="0.3"
+ARG VT_VER="0.57721"
+ARG RACON_VER="1.5.0"
+ARG MUMMER_VER="4.0.0rc1"
+ARG READITANDKEEP_VER="0.3.0"
+ARG CYLON_COMMIT_HASH="57d559a76254b0b95785f7c02fa58ef806713e01"
+ARG VARIFIER_COMMIT_HASH="8bc8726ed3cdb337dc47b62515e709759e451137"
+ARG MINIMAP2_VER="2.28"
+
+## Builder ##
+FROM ubuntu:jammy as build
+ARG SAMTOOLS_VER
+ARG BCFTOOLS_VER
+ARG HTSLIB_VER
+ARG NGMERGE_VER
+ARG VT_VER
+ARG RACON_VER
+ARG READITANDKEEP_VER
+ARG MINIMAP2_VER
+ARG MUMMER_VER
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ wget \
+ ca-certificates \
+ perl \
+ bzip2 \
+ cmake \
+ git \
+ autoconf \
+ automake \
+ make \
+ curl \
+ gcc \
+ g++ \
+ gnuplot \
+ zlib1g-dev \
+ libbz2-dev \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ libncurses5-dev \
+ libssl-dev \
+ libperl-dev \
+ libgsl0-dev \
+ procps \
+ gawk \
+ sed \
+ build-essential \
+ unzip \
+ nasm \
+ pkgconf \
+ libtool \
+ ruby \
+ yaggo \
+ gcc-11 && \
+ rm -rf /var/lib/apt/lists/* && apt-get autoclean
+
+# compile bcftools
+RUN wget -q https://github.com/samtools/bcftools/releases/download/${BCFTOOLS_VER}/bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ tar -xjf bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ rm -v bcftools-${BCFTOOLS_VER}.tar.bz2 && \
+ cd bcftools-${BCFTOOLS_VER} && \
+ make && \
+ make install
+
+# compile samtools
+RUN wget -q https://github.com/samtools/samtools/releases/download/${SAMTOOLS_VER}/samtools-${SAMTOOLS_VER}.tar.bz2 && \
+ tar -xjf samtools-${SAMTOOLS_VER}.tar.bz2 && \
+ cd samtools-${SAMTOOLS_VER} && \
+ ./configure && \
+ make && \
+ make install
+
+# compile htslib
+RUN wget -q https://github.com/samtools/htslib/releases/download/${HTSLIB_VER}/htslib-${HTSLIB_VER}.tar.bz2 && \
+ tar -vxjf htslib-${HTSLIB_VER}.tar.bz2 && \
+ rm -v htslib-${HTSLIB_VER}.tar.bz2 && \
+ cd htslib-${HTSLIB_VER} && \
+ make && \
+ make install
+
+# compile NGmerge
+RUN wget -q https://github.com/harvardinformatics/NGmerge/archive/refs/tags/v${NGMERGE_VER}.tar.gz && \
+ tar -vxf v${NGMERGE_VER}.tar.gz && \
+ cd NGmerge-${NGMERGE_VER} && \
+ make && \
+ cp NGmerge /usr/local/bin/.
+
+# compile vt
+RUN wget -q https://github.com/atks/vt/archive/refs/tags/${VT_VER}.tar.gz && \
+ tar -vxf ${VT_VER}.tar.gz && \
+ cd vt-${VT_VER} && \
+ make && \
+ cp vt /usr/local/bin/.
+
+# compile racon
+RUN wget -q https://github.com/lbcb-sci/racon/archive/refs/tags/${RACON_VER}.tar.gz && \
+ tar -xvf ${RACON_VER}.tar.gz && \
+ cd racon-${RACON_VER} && \
+ mkdir build && \
+ cd build && \
+ cmake -DCMAKE_BUILD_TYPE=Release .. && \
+ make && \
+ cp bin/racon /usr/local/bin/.
+
+# compile read-it-and-keep
+RUN wget -q https://github.com/GlobalPathogenAnalysisService/read-it-and-keep/archive/refs/tags/v${READITANDKEEP_VER}.tar.gz && \
+ tar -vxf v${READITANDKEEP_VER}.tar.gz && \
+ cd read-it-and-keep-${READITANDKEEP_VER}/src && \
+ make && \
+ cp readItAndKeep /usr/local/bin/.
+
+# install minimap2 binary
+RUN curl -L https://github.com/lh3/minimap2/releases/download/v${MINIMAP2_VER}/minimap2-${MINIMAP2_VER}_x64-linux.tar.bz2 | tar -jxvf - --no-same-owner && \
+ cp minimap2-${MINIMAP2_VER}_x64-linux/minimap2 /usr/local/bin
+
+RUN wget -q https://github.com/mummer4/mummer/archive/refs/tags/v${MUMMER_VER}.tar.gz && \
+ tar -xvf v${MUMMER_VER}.tar.gz && \
+ cd mummer-${MUMMER_VER} && \
+ autoreconf -i && \
+ ./configure CXXFLAGS="-std=c++11 -Wno-maybe-uninitialized" LDFLAGS=-static && \
+ make && \
+ make install && \
+ ldconfig
+
+## App ##
+FROM ubuntu:jammy as app
+
+ARG VIRIDIAN_VER
+ARG ENA_VER
+ARG CYLON_COMMIT_HASH
+ARG VARIFIER_COMMIT_HASH
+
+LABEL base.image="ubuntu:jammy"
+LABEL dockerfile.version="1"
+LABEL software="viridian"
+LABEL software.version="${VIRIDIAN_VER}"
+LABEL description="Ultra-careful amplicon-aware viral assembly for tiled amplicon schemes."
+LABEL website="https://github.com/iqbal-lab-org/viridian"
+LABEL license="https://github.com/iqbal-lab-org/viridian/blob/master/LICENSE"
+LABEL maintainer="Wilson Chan"
+LABEL maintainer.email="chan.wilson.wc@gmail.com"
+LABEL maintainer2="Kutluhan Incekara"
+LABEL maintainer2.email="kutluhan.incekara@ct.gov"
+LABEL maintainer3="Erin Young"
+LABEL maintainer3.email="eriny@utah.gov"
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ python3 \
+ python3-pip \
+ python3-dev \
+ gzip \
+ gcc \
+ perl \
+ zlib1g \
+ libncurses5 \
+ bzip2 \
+ liblzma-dev \
+ libcurl4-gnutls-dev \
+ wget \
+ && apt-get autoclean && rm -rf /var/lib/apt/lists/*
+
+COPY --from=build /usr/local/bin/* /usr/local/bin/
+COPY --from=build /usr/local/lib/* /usr/local/lib/
+
+RUN pip install --no-cache-dir cython
+
+# ENA tools
+RUN wget -q https://github.com/enasequence/enaBrowserTools/archive/refs/tags/v${ENA_VER}.tar.gz && \
+ tar -xvf v${ENA_VER}.tar.gz && \
+ rm v${ENA_VER}.tar.gz
+
+RUN wget -q https://github.com/iqbal-lab-org/cylon/archive/${CYLON_COMMIT_HASH}.zip &&\
+ pip install --no-cache-dir ${CYLON_COMMIT_HASH}.zip && \
+ rm ${CYLON_COMMIT_HASH}.zip
+
+RUN wget -q https://github.com/iqbal-lab-org/varifier/archive/${VARIFIER_COMMIT_HASH}.zip &&\
+ pip install --no-cache-dir ${VARIFIER_COMMIT_HASH}.zip && \
+ rm ${VARIFIER_COMMIT_HASH}.zip
+
+# install viridian
+RUN wget -q https://github.com/iqbal-lab-org/viridian/archive/refs/tags/v${VIRIDIAN_VER}.tar.gz && \
+ pip install --no-cache-dir v${VIRIDIAN_VER}.tar.gz && \
+ mkdir viridian && \
+ tar -C viridian --strip-components=1 -xvf v${VIRIDIAN_VER}.tar.gz && \
+ rm v${VIRIDIAN_VER}.tar.gz
+
+WORKDIR /data
+
+CMD ["viridian", "--help"]
+
+ENV PATH="/enaBrowserTools-${ENA_VER}/python3:$PATH" LC_ALL=C
+
+## Test ##
+FROM app as test
+
+WORKDIR /test
+
+RUN cd /viridian && \
+ pip install --no-cache-dir pytest && \
+ pytest
+
+RUN viridian run_one_sample --run_accession SRR29437696 --outdir OUT && \
+ wc -l OUT/consensus.fa.gz OUT/log.json.gz OUT/qc.tsv.gz && \
+ head OUT/variants.vcf
+
+RUN viridian run_one_sample --run_accession SRR29437696 --outdir OUT2 --keep_bam && \
+ wc -l OUT2/consensus.fa.gz OUT2/log.json.gz OUT2/qc.tsv.gz OUT2/reference_mapped.bam && \
+ head OUT2/variants.vcf
diff --git a/viridian/1.2.2/README.md b/viridian/1.2.2/README.md
new file mode 100644
index 000000000..e762947fe
--- /dev/null
+++ b/viridian/1.2.2/README.md
@@ -0,0 +1,47 @@
+# Viridian container
+
+Main tool: [Viridian](https://github.com/iqbal-lab-org/viridian)
+
+Code repository: [Viridian](https://github.com/iqbal-lab-org/viridian)
+
+Additional tools:
+- samtools: 1.20
+- bcftools: 1.20
+- htslib: 1.20
+- ENA: 1.7.1
+- ngmerge: 0.3
+- vt: 0.57721
+- racon: 1.5.0
+- mummer: 4.0.0rc1
+- read-it-and-keep: 0.3.0
+- cylon: commit hash 57d559a76254b0b95785f7c02fa58ef806713e01
+- varifier: commit hash 8bc8726ed3cdb337dc47b62515e709759e451137
+- minimap2: 2.28
+
+Basic information on how to use this tool:
+- executable: viridian
+- help: --help
+- version: --version
+- description: Ultra-careful amplicon-aware viral assembly for tiled amplicon schemes.
+
+Full documentation: [Viridian wiki](https://github.com/iqbal-lab-org/viridian/wiki)
+
+
+## Example Usage
+
+To run on paired Illumina reads:
+
+```bash
+viridian run_one_sample \
+ --tech illumina \
+ --reads1 reads_1.fastq.gz \
+ --reads2 reads_2.fastq.gz \
+ --outdir OUT
+```
+
+Download reads with accession SRR12345678 and run:
+```bash
+viridian run_one_sample --run_accession SRR12345678 --outdir OUT
+```
+
+The sequencing tech and unpaired/paired is taken from the ENA metadata for each run.