fix: correct num_ln_in_parallel_attn attribute name typo in RWConfig

This commit is contained in:
Islam Almersawi 2024-08-01 14:35:00 +04:00
parent 8642250602
commit dab00af971

View File

@@ -94,7 +94,7 @@ class RWConfig(PretrainedConfig):
             else kwargs.pop("n_head", 8)
         )
         self.layer_norm_epsilon = layer_norm_epsilon
-        self.num_ln_in_parallel_attention = num_ln_in_prallel_attention
+        self.num_ln_in_parallel_attn = num_ln_in_prallel_attention
         self.initializer_range = initializer_range
         self.use_cache = use_cache
         self.hidden_dropout = hidden_dropout