black format

This commit is contained in:
Awni Hannun
2023-12-09 14:15:25 -08:00
parent b8332a1e66
commit 98f4346c81
6 changed files with 44 additions and 18 deletions

View File

@@ -30,7 +30,7 @@ if __name__ == "__main__":
     torch_path = Path(args.torch_model)
     if not os.path.exists(args.mlx_model):
         os.makedirs(args.mlx_model)
-    mlx_path = Path(args.mlx_model)
+    mlx_path = Path(args.mlx_model)
     state = torch.load(str(torch_path / "consolidated.00.pth"))
     np.savez(
@@ -57,5 +57,3 @@ if __name__ == "__main__":
config["hidden_dim"] = state["layers.0.feed_forward.w1.weight"].shape
     with open(mlx_path / "params.json", "w") as outfile:
         json.dump(config, outfile)
-
-

View File

@@ -20,9 +20,13 @@ import wikisql
 def build_parser():
-    parser = argparse.ArgumentParser(description="LoRA finetuning with Llama or Mistral")
+    parser = argparse.ArgumentParser(
+        description="LoRA finetuning with Llama or Mistral"
+    )
     parser.add_argument(
-        "--model", required=True, help="A path to the model files containing the tokenizer, weights, config."
+        "--model",
+        required=True,
+        help="A path to the model files containing the tokenizer, weights, config.",
     )
     # Generation args
     parser.add_argument(
@@ -227,6 +231,7 @@ def generate(model, prompt, tokenizer, args):
     def generate_step():
         temp = args.temp
+
         def sample(logits):
             if temp == 0:
                 return mx.argmax(logits, axis=-1)