Update new_tokens.py

This commit is contained in:
paNikitin 2025-02-23 12:34:17 +03:00
parent 68403f5577
commit 95d44228c9

View File

@ -159,4 +159,4 @@ def implement_new_tokens(
""" """
tokenizer = update_tokenizer(tokenizer=tokenizer, tokens=tokens, special=special)
model = resize_embeddings(model=model, tokenizer=tokenizer)
return model, tokenizer