From 33e0860c9c5667fded5af674882e731909096a7f Mon Sep 17 00:00:00 2001
From: Tri Dao
Date: Thu, 19 Jan 2023 13:17:19 -0800
Subject: [PATCH] Bump to v0.2.8

---
 setup.py            | 2 +-
 training/Dockerfile | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/setup.py b/setup.py
index 3fb766695..1cd61e562 100644
--- a/setup.py
+++ b/setup.py
@@ -158,7 +158,7 @@ def append_nvcc_threads(nvcc_extra_args):
 
 setup(
     name="flash_attn",
-    version="0.2.7",
+    version="0.2.8",
     packages=find_packages(
         exclude=("build", "csrc", "include", "tests", "dist", "docs", "benchmarks", "flash_attn.egg-info",)
     ),
diff --git a/training/Dockerfile b/training/Dockerfile
index 8ad2d134e..b2c746dde 100644
--- a/training/Dockerfile
+++ b/training/Dockerfile
@@ -85,11 +85,11 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
 RUN pip install git+https://github.com/mlcommons/logging.git@2.1.0
 
 # Install FlashAttention
-RUN pip install flash-attn==0.2.7
+RUN pip install flash-attn==0.2.8
 
 # Install CUDA extensions for cross-entropy, fused dense, layer norm
 RUN git clone https://github.com/HazyResearch/flash-attention \
-    && cd flash-attention && git checkout v0.2.7 \
+    && cd flash-attention && git checkout v0.2.8 \
     && cd csrc/fused_softmax && pip install . && cd ../../ \
     && cd csrc/rotary && pip install . && cd ../../ \
     && cd csrc/xentropy && pip install . && cd ../../ \