Skip to content

Commit

Permalink
Use `F` instead of `torch.nn.functional`
Browse files Browse the repository at this point in the history
  • Loading branch information
chengzeyi committed Feb 7, 2025
1 parent 4375530 commit ffef121
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions src/para_attn/para_attn_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ def __torch_function__(self, func, types, args=(), kwargs=None):
if RingAttnMode.disabled:
return func(*args, **kwargs)

if func is torch.nn.functional.scaled_dot_product_attention:
if func is F.scaled_dot_product_attention:
return ring_attn_func(*args, **kwargs, mesh=self._mesh)

return func(*args, **kwargs)
Expand Down Expand Up @@ -334,7 +334,7 @@ def __torch_function__(self, func, types, args=(), kwargs=None):
if UlyssesAttnMode.disabled:
return func(*args, **kwargs)

if func is torch.nn.functional.scaled_dot_product_attention:
if func is F.scaled_dot_product_attention:
return ulysses_attn_func(*args, **kwargs, mesh=self._mesh)

return func(*args, **kwargs)
Expand Down Expand Up @@ -402,7 +402,7 @@ def __torch_function__(self, func, types, args=(), kwargs=None):
if UnifiedAttnMode.disabled:
return func(*args, **kwargs)

if func is torch.nn.functional.scaled_dot_product_attention:
if func is F.scaled_dot_product_attention:
parallel_method = self._parallel_method
if parallel_method == "ulysses":
with self._set_parallel_method("ring"), self:
Expand Down Expand Up @@ -470,7 +470,7 @@ def __torch_function__(self, func, types, args=(), kwargs=None):
if InBatchAttnMode.disabled:
return func(*args, **kwargs)

if func is torch.nn.functional.scaled_dot_product_attention:
if func is F.scaled_dot_product_attention:
return in_batch_attn_func(*args, **kwargs)

return func(*args, **kwargs)
Expand Down

0 comments on commit ffef121

Please sign in to comment.