Feat: update pre-commit rev (#432)

Authored by Nripesh Niketan on 2024-02-11 19:23:27 +04:00; committed by GitHub
parent f45a1ab83c
commit f1ef378a58
3 changed files with 9 additions and 7 deletions

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.12.1
+    rev: 24.1.1
     hooks:
       - id: black
   - repo: https://github.com/pycqa/isort
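
The rev bump moves the black pre-commit mirror from 23.12.1 to 24.1.1, which adopts black's 2024 stable style. The commit does not record how it was produced, but the usual sequence for a change like this is:

    pre-commit autoupdate
    pre-commit run --all-files

The first command bumps each hook's rev to its latest tag; the second re-runs the hooks across the whole repo, which is what produces the two Python formatting hunks below.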

@@ -80,9 +80,9 @@ def iterate_batches(dataset, tokenizer, batch_size, max_seq_length, train=False)
         for j in range(batch_size):
             truncated_length = min(lengths[j], max_seq_length)
             batch_arr[j, :truncated_length] = batch[j][:truncated_length]
-            lengths[
-                j
-            ] = truncated_length  # Update lengths to match truncated lengths
+            lengths[j] = (
+                truncated_length  # Update lengths to match truncated lengths
+            )
         batch = mx.array(batch_arr)
         yield batch[:, :-1], batch[:, 1:], mx.array(lengths)
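
This hunk is pure reformatting from the new style: rather than splitting the subscript target across lines, black 24 keeps lengths[j] on one line and parenthesizes the right-hand side. Behavior is unchanged. For context, here is a minimal standalone sketch of what this loop does, with NumPy standing in for mlx.core and the helper name and toy data invented for illustration:

    import numpy as np

    def pad_and_truncate(batch, max_seq_length, pad_id=0):
        # Clip every sequence to max_seq_length and right-pad to a rectangle.
        lengths = [len(seq) for seq in batch]
        max_len = min(max(lengths), max_seq_length)
        batch_arr = np.full((len(batch), max_len), pad_id, dtype=np.int64)
        for j in range(len(batch)):
            truncated_length = min(lengths[j], max_seq_length)
            batch_arr[j, :truncated_length] = batch[j][:truncated_length]
            lengths[j] = (
                truncated_length  # update lengths to match truncated lengths
            )
        return batch_arr, np.array(lengths)

    inputs, lengths = pad_and_truncate([[5, 6, 7, 8, 9], [1, 2]], max_seq_length=4)
    # inputs  -> [[5 6 7 8]
    #             [1 2 0 0]]
    # lengths -> [4 2]

In the hunk above, the trailing yield then pairs batch[:, :-1] with batch[:, 1:], so each token is trained to predict the one after it.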

@@ -186,9 +186,11 @@ def load_unet(key: str = _DEFAULT_MODEL, float16: bool = False):
         out_channels=config["out_channels"],
         block_out_channels=config["block_out_channels"],
         layers_per_block=[config["layers_per_block"]] * n_blocks,
-        num_attention_heads=[config["attention_head_dim"]] * n_blocks
-        if isinstance(config["attention_head_dim"], int)
-        else config["attention_head_dim"],
+        num_attention_heads=(
+            [config["attention_head_dim"]] * n_blocks
+            if isinstance(config["attention_head_dim"], int)
+            else config["attention_head_dim"]
+        ),
         cross_attention_dim=[config["cross_attention_dim"]] * n_blocks,
         norm_num_groups=config["norm_num_groups"],
     )
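
This hunk is the same 2024-style change applied to a conditional expression: black now wraps the whole ternary in parentheses after the keyword argument instead of letting the if/else continue at the argument's indent. The expression itself normalizes attention_head_dim, which UNet configs may give either as a single int shared by every block or as a per-block list. A small sketch of that broadcast, with invented values:

    def per_block(value, n_blocks):
        # A scalar applies to every block; a list is taken as already per-block.
        return [value] * n_blocks if isinstance(value, int) else value

    print(per_block(8, 4))                # [8, 8, 8, 8]
    print(per_block([5, 10, 20, 20], 4))  # [5, 10, 20, 20]

The surrounding call applies the same broadcast unconditionally to layers_per_block and cross_attention_dim, whose config values are always scalars.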