From 45ad06aac898a64abee55569b7594b5a3d2f228d Mon Sep 17 00:00:00 2001
From: Chunyang Wen
Date: Tue, 18 Mar 2025 22:12:24 +0800
Subject: [PATCH] Fix typo; fix lint warning when reusing the same name (#1968)

* Fix typo; fix lint warning when reusing the same name

* Add missing period
---
 python/mlx/optimizers/optimizers.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/python/mlx/optimizers/optimizers.py b/python/mlx/optimizers/optimizers.py
index 36068403d..1c45865a1 100644
--- a/python/mlx/optimizers/optimizers.py
+++ b/python/mlx/optimizers/optimizers.py
@@ -50,19 +50,19 @@ class Optimizer:
         dict_keys(['step', 'learning_rate', 'weight', 'bias'])
         """
 
-        # Iniatilize the optimizer state to match the parameter state
+        # Initialize the optimizer state to match the parameter state
         def update_state(params, state):
             if isinstance(params, (list, tuple)):
                 state = list(state)
                 for i in range(len(state)):
                     state[i] = update_state(params[i], state[i])
                 if len(state) != len(params):
-                    state.extend(tree_map(lambda x: {}, params[len(state) :]))
+                    state.extend(tree_map(lambda _: {}, params[len(state) :]))
                 return type(params)(state)
             elif isinstance(params, dict):
                 for k, v in params.items():
                     if k not in state:
-                        state[k] = tree_map(lambda x: {}, v)
+                        state[k] = tree_map(lambda _: {}, v)
                     else:
                         state[k] = update_state(v, state[k])
                 return state
@@ -79,6 +79,7 @@ class Optimizer:
 
         Args:
             parameter (mx.array): A single parameter that will be optimized.
+            state (dict): The optimizer's state.
         """
         raise NotImplementedError()
 
@@ -148,10 +149,10 @@ class Optimizer:
         """
         if isinstance(param, Callable):
             self._schedulers[name] = param
-            param = param(self.step)
+            parameter = param(self.step)
         else:
-            param = mx.array(param)
-        self.state[name] = param
+            parameter = mx.array(param)
+        self.state[name] = parameter
 
 
 class MultiOptimizer(Optimizer):
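
For context, the recursion touched by the first hunk mirrors the parameter tree into the optimizer state, creating an empty dict for every parameter that has no state entry yet. Below is a minimal runnable sketch of that pattern; the name `mirror_state` is hypothetical, and the only MLX dependency assumed is `mlx.utils.tree_map`:

# Hypothetical sketch of the state-mirroring pattern from update_state.
from mlx.utils import tree_map

def mirror_state(params, state):
    # Lists/tuples: recurse elementwise, then pad with empty state
    # entries for any newly added trailing parameters.
    if isinstance(params, (list, tuple)):
        state = list(state)
        for i in range(len(state)):
            state[i] = mirror_state(params[i], state[i])
        if len(state) != len(params):
            state.extend(tree_map(lambda _: {}, params[len(state):]))
        return type(params)(state)
    # Dicts: recurse per key, creating empty state for unseen keys.
    elif isinstance(params, dict):
        for k, v in params.items():
            if k not in state:
                state[k] = tree_map(lambda _: {}, v)
            else:
                state[k] = mirror_state(v, state[k])
        return state
    # Leaves: keep whatever state already exists.
    return state

print(mirror_state({"w": [1.0, 2.0], "b": 3.0}, {"w": [{}]}))
# -> {'w': [{}, {}], 'b': {}}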
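
The last hunk addresses the second lint warning: `param` was rebound from a `Callable` (a learning-rate schedule) to an `mx.array`, and reusing one name for values of two different types trips type-aware linters. A minimal sketch of the same pattern under a fresh name; `resolve` and the example schedule are hypothetical, not part of the MLX API:

# Hypothetical sketch: bind the resolved value to a new name instead of
# rebinding `param`, mirroring the patch's param -> parameter rename.
import mlx.core as mx

def resolve(param, step=mx.array(0)):
    if callable(param):
        parameter = param(step)  # evaluate the schedule at this step
    else:
        parameter = mx.array(param)
    return parameter

print(resolve(0.01))                                      # fixed learning rate
print(resolve(lambda step: 0.1 * mx.exp(-0.01 * step)))   # scheduled rate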