From b0c168d249c22c1e18f84a78cede0e4f307034e9 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Fri, 31 May 2024 17:56:08 +0200
Subject: [PATCH] Update server/text_generation_server/layers/attention/xpu.py

---
 server/text_generation_server/layers/attention/xpu.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/server/text_generation_server/layers/attention/xpu.py b/server/text_generation_server/layers/attention/xpu.py
index a716fcdd..d9a096f9 100644
--- a/server/text_generation_server/layers/attention/xpu.py
+++ b/server/text_generation_server/layers/attention/xpu.py
@@ -14,9 +14,6 @@ def attention(
     softmax_scale,
     window_size_left=-1,
 ):
-    if window_size_left <= 0 and window_size_left != -1:
-        raise ValueError("`window_size_left` must be > 0 or -1")
-
     if window_size_left != -1:
         raise ValueError(
             f"XPU version of Flash Attention does not support window attention (window_size_left != -1, got window_size_left={window_size_left}).