disable _custom_C for debug purpose

fxmarty 2024-05-10 18:22:41 +00:00
parent cd313364a0
commit f4dac978d2
2 changed files with 2 additions and 2 deletions

@@ -244,7 +244,7 @@ class LlamaMLP(nn.Module):
         )

     def forward(self, hidden_states):
-        if IS_ROCM_SYSTEM and self.hidden_act == "silu" and hidden_states.shape[0] == 1:
+        if False and IS_ROCM_SYSTEM and self.hidden_act == "silu" and hidden_states.shape[0] == 1:
             out = torch.empty(
                 hidden_states.shape[0],
                 self.intermediate_size,

@@ -366,7 +366,7 @@ class FastLinearROCm(nn.Module):
         weight = self.weight
         bias = self.bias

-        if IS_ROCM_SYSTEM and inp.numel() // inp.size(-1) == 1:
+        if False and IS_ROCM_SYSTEM and inp.numel() // inp.size(-1) == 1:
             batched = False

             if inp.dim() == 3:
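
Both hunks apply the same trick: prefixing the ROCm-specific guard with `False and` so the condition always short-circuits, the `_custom_C` fast path is never taken, and execution falls through to the stock PyTorch code, which isolates whether the custom kernel is the source of a bug. Below is a minimal sketch of that pattern; only `IS_ROCM_SYSTEM` and the shape of the guard come from the diff above, while the `linear_forward` helper and its fallback are illustrative assumptions, not the repository's FastLinearROCm code.

import torch

# Illustrative sketch only: mirrors the `False and <guard>` pattern from the
# commit, not the actual text-generation-inference implementation.
IS_ROCM_SYSTEM = torch.version.hip is not None

def linear_forward(inp: torch.Tensor, weight: torch.Tensor, bias=None) -> torch.Tensor:
    # `False and ...` short-circuits immediately, so the custom-kernel branch
    # is dead code while debugging; the original condition stays visible and
    # can be restored by deleting the `False and` prefix.
    if False and IS_ROCM_SYSTEM and inp.numel() // inp.size(-1) == 1:
        raise RuntimeError("would dispatch to the ROCm skinny-GEMM kernel (disabled here)")
    # Reference PyTorch path, taken unconditionally while the guard is short-circuited.
    return torch.nn.functional.linear(inp, weight, bias)

Reverting the commit (removing the two `False and` prefixes) re-enables the custom-kernel dispatch once debugging is done.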