Refactor activation function and loss calculation (#325)

This commit is contained in:
AtomicVar
2024-01-17 05:42:56 +08:00
committed by GitHub
parent ce7b65e8c4
commit 2ba5d3db14
2 changed files with 3 additions and 3 deletions

View File

@@ -155,5 +155,5 @@ class StableDiffusion:
def decode(self, x_t):
    """Decode latents ``x_t`` into image space, clamped to [0, 1].

    Runs the autoencoder's decoder, then rescales with ``x / 2 + 0.5``
    (presumably mapping decoder output from [-1, 1] to [0, 1] — confirm
    against the autoencoder's output range) and clips to [0, 1].
    """
    x = self.autoencoder.decode(x_t)
    # mx.clip replaces the older minimum/maximum pair — one call, same result.
    # (The original had both lines; the first was dead code, its result
    # immediately overwritten by the clip below.)
    x = mx.clip(x / 2 + 0.5, 0, 1)
    return x