Update modeling_motif.py
remove ScaledDotProductAttention
modeling_motif.py +0 -2
```diff
@@ -63,11 +63,9 @@ if is_flash_attn_2_available():
 
 try:
     moreh_ops = torch.ops.moreh
-    ScaledDotProductAttention = moreh_ops.scaled_dot_product_attention
     MorehFlashAttention = moreh_ops.flash_attention
     logger.warning_once("Using moreh ops")
 except AttributeError:
-    ScaledDotProductAttention = None
     MorehFlashAttention = None
     logger.warning_once("Failed to import moreh ops")
 
```
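After this change, only the flash-attention op is bound from `torch.ops.moreh`; there is no longer a module-level `ScaledDotProductAttention` handle. Below is a minimal sketch of the resulting import-time probe, paired with a hypothetical call site that falls back to PyTorch's built-in `torch.nn.functional.scaled_dot_product_attention`. The `attention` function and the assumed `MorehFlashAttention` signature are illustrative only; the actual dispatch in modeling_motif.py is not shown in this diff.

```python
import torch
import torch.nn.functional as F

# Import-time probe, as in the diff: bind the moreh flash-attention op if the
# extension is registered, otherwise leave the handle as None. Accessing a
# missing custom op on a torch.ops namespace raises AttributeError.
try:
    moreh_ops = torch.ops.moreh
    MorehFlashAttention = moreh_ops.flash_attention
except AttributeError:
    MorehFlashAttention = None


def attention(query, key, value):
    # Hypothetical call site (not part of the diff): the signature assumed for
    # MorehFlashAttention here mirrors F.scaled_dot_product_attention.
    if MorehFlashAttention is not None:
        return MorehFlashAttention(query, key, value)
    # Fall back to PyTorch's built-in scaled dot-product attention.
    return F.scaled_dot_product_attention(query, key, value)
```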