diff --git a/server/text_generation_server/layers/moe/__init__.py b/server/text_generation_server/layers/moe/__init__.py
index 91b3a610..fb285bfe 100644
--- a/server/text_generation_server/layers/moe/__init__.py
+++ b/server/text_generation_server/layers/moe/__init__.py
@@ -89,9 +89,9 @@ class DenseMoELayer(nn.Module):
             "No fused layers are available for this model type, using (slower) dense MoE layer",
         )
 
-        assert (n_expert_group is None) == (topk_group is None), (
-            "n_expert_group and topk_group must both be None or have some value"
-        )
+        assert (n_expert_group is None) == (
+            topk_group is None
+        ), "n_expert_group and topk_group must both be None or have some value"
 
         self.n_expert_group = n_expert_group
         self.n_experts = n_experts
diff --git a/server/text_generation_server/layers/moe/fp8.py b/server/text_generation_server/layers/moe/fp8.py
index e7a1f80b..3016c8a2 100644
--- a/server/text_generation_server/layers/moe/fp8.py
+++ b/server/text_generation_server/layers/moe/fp8.py
@@ -36,9 +36,9 @@ class FP8SparseMoELayer(nn.Module):
     ):
         super().__init__()
 
-        assert (n_expert_group is None) == (topk_group is None), (
-            "n_expert_group and topk_group must both be None or have some value"
-        )
+        assert (n_expert_group is None) == (
+            topk_group is None
+        ), "n_expert_group and topk_group must both be None or have some value"
 
         self.n_expert_group = n_expert_group
         self.topk = topk