From 3684295e5a208da5016d6f0d6d1e9eeb1aed7565 Mon Sep 17 00:00:00 2001
From: NeTT
Date: Sun, 10 Dec 2023 19:05:55 +0530
Subject: [PATCH] change comments

---
 examples/classification/iris.ts | 12 ++++++------
 examples/classification/spam.ts |  4 ++--
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/examples/classification/iris.ts b/examples/classification/iris.ts
index 8bea622..01f45bc 100644
--- a/examples/classification/iris.ts
+++ b/examples/classification/iris.ts
@@ -58,17 +58,17 @@ const net = new Sequential({
 
   // Define each layer of the network
   layers: [
-    // A dense layer with 4 neurons
+    // A dense layer with 16 neurons
     DenseLayer({ size: [16] }),
-    // A sigmoid activation layer
+    // A ReLu activation layer
     ReluLayer(),
-    // A dense layer with 1 neuron
+    // A dense layer with 3 neurons
     DenseLayer({ size: [3] }),
-    // Another sigmoid layer
+    // A Softmax activation layer
     SoftmaxLayer(),
   ],
   optimizer: AdamOptimizer(),
-  // We are using MSE for finding cost
+  // We are using CrossEntropy for finding cost
   cost: Cost.CrossEntropy,
   scheduler: OneCycle({ max_rate: 0.05, step_size: 50 }),
 });
@@ -83,7 +83,7 @@ net.train(
       outputs: tensor2D(train[1]),
     },
   ],
-  // Train for 10000 epochs
+  // Train for 300 epochs
   300,
   1,
   0.02,
diff --git a/examples/classification/spam.ts b/examples/classification/spam.ts
index 4a14d44..ff5b181 100644
--- a/examples/classification/spam.ts
+++ b/examples/classification/spam.ts
@@ -90,7 +90,7 @@ const net = new Sequential({
     SigmoidLayer(),
   ],
 
-  // We are using MSE for finding cost
+  // We are using Log Loss for finding cost
   cost: Cost.BinCrossEntropy,
   optimizer: AdamOptimizer(),
 });
@@ -106,7 +106,7 @@ net.train(
       outputs: tensor2D(train[1].map((x) => [x])),
     },
   ],
-  // Train for 10000 epochs
+  // Train for 20 epochs
   20,
   1,
   0.01,
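
For readers skimming the patch, the corrected comments line up with the code as follows. The sketch below reassembles the iris example's network definition from the hunk context above; the import specifier, the `setupBackend`/`CPU` call, the `size` field, and the placeholder `train` data are assumptions about the parts of the file this diff does not show, so treat it as an approximation of examples/classification/iris.ts rather than the file itself.

```ts
// A sketch of examples/classification/iris.ts after this patch is applied.
// The import path and helper names are assumed from netsaur's public API;
// only the Sequential config and the train() call are taken from the diff.
import {
  AdamOptimizer,
  Cost,
  CPU,
  DenseLayer,
  OneCycle,
  ReluLayer,
  Sequential,
  setupBackend,
  SoftmaxLayer,
  tensor2D,
} from "https://deno.land/x/netsaur/mod.ts";

await setupBackend(CPU);

// Placeholder split; the real example builds this from the iris dataset.
// Features are 4-dimensional, labels are one-hot over 3 classes.
const train: [number[][], number[][]] = [
  [[5.1, 3.5, 1.4, 0.2]],
  [[1, 0, 0]],
];

const net = new Sequential({
  // Assumed input shape; not visible in this diff.
  size: [4, 4],
  // Define each layer of the network
  layers: [
    // A dense layer with 16 neurons
    DenseLayer({ size: [16] }),
    // A ReLu activation layer
    ReluLayer(),
    // A dense layer with 3 neurons, one per iris class
    DenseLayer({ size: [3] }),
    // A Softmax activation layer
    SoftmaxLayer(),
  ],
  optimizer: AdamOptimizer(),
  // We are using CrossEntropy for finding cost
  cost: Cost.CrossEntropy,
  scheduler: OneCycle({ max_rate: 0.05, step_size: 50 }),
});

net.train(
  [{ inputs: tensor2D(train[0]), outputs: tensor2D(train[1]) }],
  // Train for 300 epochs
  300,
  // 1 batch per epoch, learning rate 0.02
  1,
  0.02,
);
```

The spam example follows the same logic: its final SigmoidLayer() produces a single probability, so Cost.BinCrossEntropy (log loss) is the matching cost there, just as the three-way SoftmaxLayer() in the iris example pairs with Cost.CrossEntropy.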