path: root/flash-attention.diff
blob: 98ee942a604c5cf3ac56eec8dcebdc6e04f2ab82 (plain)
--- a/setup.py	2024-12-27 04:10:19.630516943 +0300
+++ b/setup.py	2024-12-27 04:42:21.118387447 +0300
@@ -151,7 +151,7 @@
 
     # Check, if ATen/CUDAGeneratorImpl.h is found, otherwise use ATen/cuda/CUDAGeneratorImpl.h
     # See https://github.com/pytorch/pytorch/pull/70650
-    generator_flag = []
+    generator_flag = ["-DGLOG_USE_GLOG_EXPORT"]
     torch_dir = torch.__path__[0]
     if os.path.exists(os.path.join(torch_dir, "include", "ATen", "CUDAGeneratorImpl.h")):
         generator_flag = ["-DOLD_GENERATOR_PATH"]
@@ -523,6 +523,7 @@
             "docs",
             "benchmarks",
             "flash_attn.egg-info",
+            "hopper",
         )
     ),
     author="Tri Dao",
@@ -550,6 +551,5 @@
     setup_requires=[
         "packaging",
         "psutil",
-        "ninja",
     ],
 )
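
For reference, the patch makes three changes to upstream flash-attention's setup.py: it seeds generator_flag with -DGLOG_USE_GLOG_EXPORT (presumably so the extension compiles against the system glog/pytorch on Arch), it adds "hopper" to the package-exclude list, and it drops "ninja" from setup_requires (presumably because the build environment supplies ninja itself). The sketch below shows roughly what the affected fragments look like after the patch is applied; everything outside the diff context, such as the find_packages/setup structure and the omitted exclude entries, is an assumption based on upstream flash-attention, not part of this file.

# Sketch of the patched setup.py fragments only -- not the complete upstream file.
import os

import torch
from setuptools import setup, find_packages

# Default extra compile flag introduced by the patch; replaced below when the
# old ATen generator header exists (see https://github.com/pytorch/pytorch/pull/70650).
generator_flag = ["-DGLOG_USE_GLOG_EXPORT"]
torch_dir = torch.__path__[0]
if os.path.exists(os.path.join(torch_dir, "include", "ATen", "CUDAGeneratorImpl.h")):
    generator_flag = ["-DOLD_GENERATOR_PATH"]

setup(
    name="flash_attn",  # assumption: package name as in upstream
    packages=find_packages(
        exclude=(
            # upstream lists further entries before these; only the ones
            # visible in the diff context are reproduced here
            "docs",
            "benchmarks",
            "flash_attn.egg-info",
            "hopper",  # added by the patch: skip the Hopper/FA3 sources
        )
    ),
    author="Tri Dao",
    setup_requires=[
        "packaging",
        "psutil",
        # "ninja" removed by the patch (assumption: provided by the build
        # environment instead of being fetched at build time)
    ],
)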