fix: Unsupported BFloat16 Data Type Issue with MPS Backend

This commit is contained in:
张嘉豪 2023-12-08 16:19:35 +08:00
parent ff3cc56c8d
commit 4018aed335

View File

@@ -32,7 +32,12 @@ def map_torch_to_mlx(key, value):
     elif "rope" in key:
         return None, None
-    return key, value.numpy()
+    return (
+        key,
+        value.numpy()
+        if value.dtype != torch.bfloat16
+        else value.to(torch.float32).numpy(),
+    )
 if __name__ == "__main__":