From fe34634ae22e45cfae2248f201f4ec4987a5aa82 Mon Sep 17 00:00:00 2001 From: liufengwei0103 <2472937968@qq.com> Date: Mon, 3 Nov 2025 17:35:18 +0800 Subject: [PATCH] Fix scaled_dot_product_attention missing a return value in the distributed case --- python/paddle/nn/functional/flash_attention.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/paddle/nn/functional/flash_attention.py b/python/paddle/nn/functional/flash_attention.py index 2d1b050cdba6e7..75feccd2f7a5a2 100644 --- a/python/paddle/nn/functional/flash_attention.py +++ b/python/paddle/nn/functional/flash_attention.py @@ -1453,6 +1453,7 @@ def scaled_dot_product_attention( dropout_p, is_causal, ) + return out if attn_mask is None: # downgraded to ordinary flash attention implementation