Adjust the round_up_seq logic to align with the prefill warmup phase on HPU

Signed-off-by: Liu, Kaixuan <kaixuan.liu@intel.com>
This commit is contained in:
Liu, Kaixuan 2025-05-12 07:21:33 -04:00
parent c94f415af4
commit c264a42aa1

View File

@ -73,7 +73,7 @@ def torch_compile_for_eager(func):
def round_up_seq(number, k, base):
    """Round *number* up to the nearest value of the form k * base**e.

    Used to pick a padded sequence length that matches the HPU prefill
    warmup buckets (k, k*base, k*base**2, ...).

    Args:
        number: Positive value to round up (e.g. a sequence length).
        k: Smallest bucket size; the result is never below k.
        base: Geometric growth factor between buckets (> 1).

    Returns:
        int: k * base**e for the smallest integer e >= 0 such that the
        result is >= number (clamped so inputs below k still map to k).
    """
    # Clamp the exponent at 0 so that number < k rounds up to k itself
    # rather than producing a fractional bucket (negative exponent).
    exponent = max(0, math.ceil(math.log(number / k, base)))
    return int(k * (base**exponent))