QWEN: Fix unsupported ScalarType BFloat16 (#187)

Fix "unsupported ScalarType BFloat16": NumPy cannot represent torch.bfloat16, so bfloat16 tensors are now cast to float32 before calling .numpy().
This commit is contained in:
Authored by Yifan on 2023-12-25 22:10:01 +08:00; committed via GitHub.
parent 647e48870a
commit 738448c2d4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -60,7 +60,7 @@ def convert(args):
args.model, trust_remote_code=True, torch_dtype=torch.float16
)
state_dict = model.state_dict()
weights = {replace_key(k): v.numpy() for k, v in state_dict.items()}
weights = {replace_key(k): (v.numpy() if v.dtype != torch.bfloat16 else v.to(torch.float32).numpy()) for k, v in state_dict.items()}
config = model.config.to_dict()
if args.quantize: