From 7175d66f2b06631e800034e4efa9c0ffbdb1a181 Mon Sep 17 00:00:00 2001 From: Daniel King Date: Mon, 12 Feb 2024 10:04:45 -0800 Subject: [PATCH] new base images --- .github/workflows/docker.yaml | 10 ++-------- README.md | 8 +++----- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index e62b01fa52..c363e78654 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -17,17 +17,11 @@ jobs: strategy: matrix: include: - - name: "2.1.0_cu121" - base_image: mosaicml/pytorch:2.1.0_cu121-python3.10-ubuntu20.04 - dep_groups: "[gpu]" - name: "2.1.0_cu121_flash2" - base_image: mosaicml/pytorch:2.1.0_cu121-python3.10-ubuntu20.04 + base_image: mosaicml/pytorch:2.1.2_cu121-python3.10-ubuntu20.04 dep_groups: "[gpu-flash2]" - - name: "2.1.0_cu121_aws" - base_image: mosaicml/pytorch:2.1.0_cu121-python3.10-ubuntu20.04-aws - dep_groups: "[gpu]" - name: "2.1.0_cu121_flash2_aws" - base_image: mosaicml/pytorch:2.1.0_cu121-python3.10-ubuntu20.04-aws + base_image: mosaicml/pytorch:2.1.2_cu121-python3.10-ubuntu20.04-aws dep_groups: "[gpu-flash2]" steps: - name: Maximize Build Space on Worker diff --git a/README.md b/README.md index 8ffe222cb7..ee9df13ff6 100644 --- a/README.md +++ b/README.md @@ -113,11 +113,9 @@ You can select a specific commit hash such as `mosaicml/llm-foundry:1.13.1_cu117 | Docker Image | Torch Version | Cuda Version | LLM Foundry dependencies installed? | | ------------------------------------------------------ | ------------- | ----------------- | ----------------------------------- | -| `mosaicml/pytorch:2.1.0_cu121-python3.10-ubuntu20.04` | 2.1.0 | 12.1 (Infiniband) | No | -| `mosaicml/llm-foundry:2.1.0_cu121-latest` | 2.1.0 | 12.1 (Infiniband) | Yes (flash attention v1. Warning: Support for flash attention v1 has been deprecated.) | -| `mosaicml/llm-foundry:2.1.0_cu121_flash2-latest` | 2.1.0 | 12.1 (Infiniband) | Yes (flash attention v2. 
Note: We recommend using flash attention v2.) | -| `mosaicml/llm-foundry:2.1.0_cu121_aws-latest` | 2.1.0 | 12.1 (EFA) | Yes (flash attention v1. Warning: Support for flash attention v1 has been deprecated.) | -| `mosaicml/llm-foundry:2.1.0_cu121_flash2_aws-latest` | 2.1.0 | 12.1 (EFA) | Yes (flash attention v2. Note: We recommend using flash attention v2.) | +| `mosaicml/pytorch:2.1.2_cu121-python3.10-ubuntu20.04` | 2.1.2 | 12.1 (Infiniband) | No | +| `mosaicml/llm-foundry:2.1.0_cu121_flash2-latest` | 2.1.2 | 12.1 (Infiniband) | Yes | +| `mosaicml/llm-foundry:2.1.0_cu121_flash2_aws-latest` | 2.1.2 | 12.1 (EFA) | Yes | # Installation