Remove "using namespace mlx::core" in benchmarks/examples (#1685)

* Remove "using namespace mlx::core" in benchmarks/examples

* Fix building example extension

* Fix a missed instance in a comment

* Fix building on M chips
Author: Cheng
Date:   2024-12-12 00:08:29 +09:00
Committed by: GitHub
Parent: f76a49e555
Commit: 4f9b60dd53

12 changed files with 373 additions and 367 deletions
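Every file touched by this commit follows the same mechanical pattern: the global using-directive is dropped in favor of a short namespace alias, so MLX names stay explicitly qualified. A minimal sketch of the two styles side by side (illustrative only, not taken from the diff):

#include "mlx/mlx.h"

// Before: a using-directive dumps every mlx::core name into the
// enclosing scope, inviting collisions (e.g. mlx::core::array vs. std::array).
// using namespace mlx::core;

// After: a namespace alias keeps each call qualified but still terse.
namespace mx = mlx::core;

int main() {
  mx::array a = mx::ones({2, 2}); // explicit mx:: prefix on every MLX call
  mx::eval(a);                    // force evaluation of the lazy computation
  return 0;
}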

examples/cpp/logistic_regression.cpp

@@ -10,7 +10,7 @@
 /**
  * An example of logistic regression with MLX.
  */
-using namespace mlx::core;
+namespace mx = mlx::core;
 
 int main() {
   int num_features = 100;
@@ -19,35 +19,35 @@ int main() {
   float learning_rate = 0.1;
 
   // True parameters
-  auto w_star = random::normal({num_features});
+  auto w_star = mx::random::normal({num_features});
 
   // The input examples
-  auto X = random::normal({num_examples, num_features});
+  auto X = mx::random::normal({num_examples, num_features});
 
   // Labels
-  auto y = matmul(X, w_star) > 0;
+  auto y = mx::matmul(X, w_star) > 0;
 
   // Initialize random parameters
-  array w = 1e-2 * random::normal({num_features});
+  mx::array w = 1e-2 * mx::random::normal({num_features});
 
-  auto loss_fn = [&](array w) {
-    auto logits = matmul(X, w);
+  auto loss_fn = [&](mx::array w) {
+    auto logits = mx::matmul(X, w);
     auto scale = (1.0f / num_examples);
-    return scale * sum(logaddexp(array(0.0f), logits) - y * logits);
+    return scale * mx::sum(mx::logaddexp(mx::array(0.0f), logits) - y * logits);
   };
 
-  auto grad_fn = grad(loss_fn);
+  auto grad_fn = mx::grad(loss_fn);
 
   auto tic = timer::time();
   for (int it = 0; it < num_iters; ++it) {
-    auto grad = grad_fn(w);
-    w = w - learning_rate * grad;
-    eval(w);
+    auto grads = grad_fn(w);
+    w = w - learning_rate * grads;
+    mx::eval(w);
   }
   auto toc = timer::time();
 
   auto loss = loss_fn(w);
-  auto acc = sum((matmul(X, w) > 0) == y) / num_examples;
+  auto acc = mx::sum((mx::matmul(X, w) > 0) == y) / num_examples;
 
   auto throughput = num_iters / timer::seconds(toc - tic);
   std::cout << "Loss " << loss << ", Accuracy, " << acc << ", Throughput "
             << throughput << " (it/s)." << std::endl;
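Beyond readability, the alias future-proofs the examples: names later added to mlx::core can no longer shadow locals or standard-library symbols. As a usage sketch, the aliased style in a fresh standalone program, assuming MLX is installed and linked (hypothetical file, not part of this commit):

#include <iostream>

#include "mlx/mlx.h"

namespace mx = mlx::core;

int main() {
  // f(v) = sum(v * v), so the gradient is 2v.
  auto f = [](mx::array v) { return mx::sum(v * v); };
  auto x = mx::array({1.0f, 2.0f, 3.0f});
  auto g = mx::grad(f)(x);     // gradient of f evaluated at x
  mx::eval(g);                 // materialize the lazy result
  std::cout << g << std::endl; // array([2, 4, 6], dtype=float32)
  return 0;
}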