count bias too

Alex Barron 2024-12-08 14:05:12 -08:00
parent 46109e4141
commit 4345c7c8bb


@@ -250,8 +250,9 @@ def remove_lora_layers(model: nn.Module) -> nn.Module:
 def nparams(module):
-    if isinstance(module, (nn.QuantizedLinear, nn.QuantizedEmbedding)):
-        return module.weight.size * 32 // module.bits
+    if hasattr(module, "bits"):
+        n = 0 if not hasattr(module, "bias") else module.bias.size
+        return n + module.weight.size * 32 // module.bits
     return sum(v.size for _, v in tree_flatten(module.parameters()))
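
For illustration, below is a minimal sketch of the patched nparams used outside the diff. It assumes MLX's public API (mlx.nn.Linear, mlx.nn.QuantizedLinear, mlx.utils.tree_flatten); the 512x512 layer size, group_size, and bits values are arbitrary example choices, not anything from the commit.

import mlx.nn as nn
from mlx.utils import tree_flatten

def nparams(module):
    # Quantized layers store weights packed into uint32 values; recover the
    # logical element count from the packed size and the bit width, and add
    # the (unpacked) bias if the layer has one.
    if hasattr(module, "bits"):
        n = 0 if not hasattr(module, "bias") else module.bias.size
        return n + module.weight.size * 32 // module.bits
    # Non-quantized modules: sum the sizes of all parameter arrays.
    return sum(v.size for _, v in tree_flatten(module.parameters()))

linear = nn.Linear(512, 512, bias=True)
quantized = nn.QuantizedLinear(512, 512, bias=True, group_size=64, bits=4)

# With the bias included, both layers should report the same logical count:
# 512 * 512 + 512 = 262656.
print(nparams(linear))
print(nparams(quantized))

Two effects of the change are visible here: the hasattr("bits") check covers any quantized module rather than only QuantizedLinear and QuantizedEmbedding, and the bias term fixes the under-count that the commit title ("count bias too") refers to.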