mirror of
https://github.com/ml-explore/mlx-examples.git
synced 2025-09-01 04:14:38 +08:00
BERT implementation
This commit is contained in:
48
bert/convert.py
Normal file
48
bert/convert.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from transformers import BertModel
|
||||
|
||||
import argparse
|
||||
import numpy
|
||||
|
||||
|
||||
def replace_key(key: str) -> str:
    """Map a Hugging Face BERT state-dict key to its MLX equivalent.

    The substitutions are applied in order, and the order matters: the
    specific ``.attention.output.*`` patterns must be rewritten before
    the generic ``.output.*`` and ``.LayerNorm.`` patterns would match.
    """
    substitutions = (
        (".layer.", ".layers."),
        (".self.key.", ".key_proj."),
        (".self.query.", ".query_proj."),
        (".self.value.", ".value_proj."),
        (".attention.output.dense.", ".attention.out_proj."),
        (".attention.output.LayerNorm.", ".ln1."),
        (".output.LayerNorm.", ".ln2."),
        (".intermediate.dense.", ".linear1."),
        (".output.dense.", ".linear2."),
        (".LayerNorm.", ".norm."),
        ("pooler.dense.", "pooler."),
    )
    for old, new in substitutions:
        key = key.replace(old, new)
    return key
|
||||
|
||||
|
||||
def convert(bert_model: str, mlx_model: str) -> None:
    """Download a Hugging Face BERT checkpoint and save it as an MLX .npz.

    Args:
        bert_model: Hugging Face model name (e.g. ``"bert-base-uncased"``).
        mlx_model: Output path for the converted NumPy weight archive.
    """
    from pathlib import Path

    model = BertModel.from_pretrained(bert_model)
    # Rename every parameter to the MLX layout before saving.
    tensors = {
        replace_key(key): tensor.numpy() for key, tensor in model.state_dict().items()
    }
    # numpy.savez raises if the target directory is missing (the script's
    # default output is under "weights/"), so create it up front.
    Path(mlx_model).parent.mkdir(parents=True, exist_ok=True)
    numpy.savez(mlx_model, **tensors)
    # NOTE(review): a "save the tokenizer" step was announced here but never
    # implemented in this block — confirm whether tokenizer export is needed.
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: convert a named Hugging Face BERT checkpoint
    # into an MLX-compatible .npz weight file.
    cli = argparse.ArgumentParser(description="Convert BERT weights to MLX.")
    options = {
        "--bert-model": dict(
            type=str,
            default="bert-base-uncased",
            help="The huggingface name of the BERT model to save.",
        ),
        "--mlx-model": dict(
            type=str,
            default="weights/bert-base-uncased.npz",
            help="The output path for the MLX BERT weights.",
        ),
    }
    for flag, spec in options.items():
        cli.add_argument(flag, **spec)
    cli_args = cli.parse_args()
    convert(cli_args.bert_model, cli_args.mlx_model)
|
Reference in New Issue
Block a user