Commit 05481617 authored by Tri Dao's avatar Tri Dao
Browse files

Bump version to 0.2.1

Showing with 2 additions and 2 deletions
+2 -2
......@@ -14,7 +14,7 @@ from apex.transformer.tensor_parallel.utils import VocabUtility
# `all_gather_into_tensor` and `reduce_scatter_tensor` are new placeholders for
# `_all_gather_base` and `_reduce_scatter_base`. They require the most recent
# version of PyTorch. The following 4 lines are for backward comparability with
# version of PyTorch. The following 4 lines are for backward compatibility with
# older PyTorch.
if "all_gather_into_tensor" not in dir(torch.distributed):
torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
......
......@@ -152,7 +152,7 @@ ext_modules.append(
setup(
name="flash_attn",
version="0.2.0",
version="0.2.1",
packages=find_packages(
exclude=("build", "csrc", "include", "tests", "dist", "docs", "benchmarks", "flash_attn.egg-info",)
),
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment