Skip to content

Commit

Permalink
feat: dropout layer (#28)
Browse files Browse the repository at this point in the history
* chore: bump version

* feat: dropout layer

Co-authored-by: Dean Srebnik <[email protected]>

* feat: auto backend

* feat: dataset utils

* feat: tensor docs

* fix: dropout layer

---------

Co-authored-by: CarrotzRule123 <[email protected]>
Co-authored-by: Dean Srebnik <[email protected]>
  • Loading branch information
3 people authored May 16, 2023
1 parent 28e6236 commit 2b7c5ca
Show file tree
Hide file tree
Showing 49 changed files with 658 additions and 13,308 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ jobs:
rustc --version
cargo --version
cargo build --release
deno run -A https://deno.land/x/[email protected]/main.ts --out src/backend_wasm/lib
- name: Release
uses: softprops/action-gh-release@master
env:
Expand All @@ -43,3 +44,4 @@ jobs:
target/release/libnetsaur.so
target/release/libnetsaur.dylib
target/release/netsaur.dll
src/backend_wasm/lib/netsaur_bg.wasm
8 changes: 6 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,10 @@
- XOR ([CPU](./examples/xor_cpu.ts), [WASM](./examples/xor_wasm.ts))
- Linear Regression ([CPU](./examples/linear_cpu.ts),
[WASM](./examples/linear_wasm.ts))
- Filters ([CPU](./examples/filters/conv.ts),
[WASM](./examples/filters/conv_wasm.ts))
- Mnist ([CPU](./examples/mnist),
[WASM](./examples/mnist))

### Maintainers

Expand Down Expand Up @@ -127,7 +131,7 @@ net.train(
/**
* The number of iterations is set to 10000.
*/
10000,
10000
);

console.log(`training time: ${performance.now() - time}ms`);
Expand Down Expand Up @@ -226,7 +230,7 @@ net.train(
/**
* The number of iterations is set to 10000.
*/
10000,
10000
);

console.log(`training time: ${performance.now() - time}ms`);
Expand Down
15 changes: 9 additions & 6 deletions src/model/data/csv.ts → data/csv.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
import { tensor2D } from "./core/tensor.ts";
import type { DataLike } from "./data.ts";
import { CsvStream } from "../../../deps.ts";
import { tensor2D } from "../mod.ts";
import type { DataLike } from "./types.ts";
import { CsvParseStream } from "./deps.ts";

export interface CsvColumnConfig {
/**
* Whether this column is a label column.
*/
label?: boolean;
}

Expand All @@ -16,7 +19,7 @@ export async function loadCsv(
): Promise<DataLike> {
const data = await fetch(url).then((res) =>
res.body!.pipeThrough(new TextDecoderStream())
.pipeThrough(new CsvStream())
.pipeThrough(new CsvParseStream())
);
const colConfigs = Object.entries(config.columns ?? {});
const train_x: number[][] = [];
Expand Down Expand Up @@ -52,7 +55,7 @@ export async function loadCsv(
train_y.push(y);
}
return {
train_x: await tensor2D(train_x),
train_y: await tensor2D(train_y),
train_x: tensor2D(train_x),
train_y: tensor2D(train_y),
};
}
27 changes: 27 additions & 0 deletions data/data.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import { Rank, Tensor } from "../mod.ts";
import { CsvLoaderConfig, loadCsv } from "./csv.ts";
import type { DataLike } from "./types.ts";

/**
 * Holds a dataset as a pair of tensors (inputs and expected outputs),
 * adapting a plain {@link DataLike} object for model consumption.
 */
export class Data {
  /**
   * Model input data
   */
  inputs: Tensor<Rank>;

  /**
   * Model output data / labels
   */
  outputs: Tensor<Rank>;

  constructor({ train_x, train_y }: DataLike) {
    this.inputs = train_x;
    this.outputs = train_y;
  }

  /**
   * Load data from a CSV file or URL containing CSV data.
   */
  static async csv(url: string | URL, config?: CsvLoaderConfig) {
    const parsed = await loadCsv(url, config);
    return new Data(parsed);
  }
}
1 change: 1 addition & 0 deletions data/deps.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
// Re-export the streaming CSV parser used by data/csv.ts.
// (Fixes inconsistent brace spacing in the export clause.)
export { CsvParseStream } from "https://deno.land/[email protected]/csv/csv_parse_stream.ts";
File renamed without changes.
23 changes: 23 additions & 0 deletions data/types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { Rank, Tensor } from "../mod.ts";

/**
 * Plain-object shape of a dataset: training tensors plus optional
 * held-out test tensors. Consumed by {@link Data} and produced by
 * loaders such as `loadCsv`.
 */
export interface DataLike {
  /**
   * Model input data
   */
  train_x: Tensor<Rank>;

  /**
   * Model output data / labels
   */
  train_y: Tensor<Rank>;

  /**
   * Model test input data
   */
  test_x?: Tensor<Rank>;

  /**
   * Model test output data / labels
   */
  test_y?: Tensor<Rank>;
}
10 changes: 8 additions & 2 deletions deno.json
Original file line number Diff line number Diff line change
@@ -1,15 +1,21 @@
{
"tasks": {
"example:xor": "deno run -A --unstable ./examples/xor_cpu.ts",
"example:xor": "deno run -A --unstable ./examples/xor_auto.ts",
"example:xor-cpu": "deno run -A --unstable ./examples/xor_cpu.ts",
"example:xor-wasm": "deno run -A --unstable ./examples/xor_wasm.ts",
"example:linear": "deno run -A --unstable ./examples/linear_cpu.ts",
"example:linear-wasm": "deno run -A --unstable ./examples/linear_wasm.ts",
"example:filters": "deno run -A --unstable examples/filters/conv.ts ",
"example:filters-wasm": "deno run -A --unstable examples/filters/conv_wasm.ts ",
"example:train": "deno run -A --unstable examples/model/train.ts ",
"example:run": "deno run -A --unstable examples/model/run.ts ",
"example:mnist-download": "deno run -A --unstable examples/mnist/download.ts ",
"example:mnist-train": "deno run -A --unstable examples/mnist/train.ts ",
"example:mnist-train-wasm": "deno run -A --unstable examples/mnist/train_wasm.ts ",
"example:mnist-predict": "deno run -A --unstable examples/mnist/predict.ts ",
"example:mnist-predict-wasm": "deno run -A --unstable examples/mnist/predict_wasm.ts ",
"build": "deno task build:cpu && deno task build:wasm",
"build:cpu": "cargo build --release",
"build:wasm": "deno run -A https://deno.land/x/[email protected]/main.ts --sync --out src/backend_wasm/lib"
"build:wasm": "deno run -A https://deno.land/x/[email protected]/main.ts --out src/backend_wasm/lib"
}
}
1 change: 0 additions & 1 deletion deps.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
export { CsvStream } from "https://deno.land/[email protected]/csv/mod.ts";
export { dlopen } from "https://deno.land/x/[email protected]/mod.ts";
export type { FetchOptions } from "https://deno.land/x/[email protected]/mod.ts";
35 changes: 35 additions & 0 deletions examples/mnist/predict_wasm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import { Rank, Sequential, setupBackend, Tensor, WASM } from "../../mod.ts";
import { loadDataset } from "./common.ts";

// Run inference on the WASM backend.
await setupBackend(WASM);

// Load the model saved by the MNIST training example.
const network = Sequential.loadFile("examples/mnist/mnist.test.st");

// First 1000 test samples; each input is reshaped in place to
// [1, 28, 28] — presumably [channels, height, width]; TODO confirm.
const testSet = loadDataset("test-images.idx", "test-labels.idx", 0, 1000);
// forEach, not map: the reshape mutates in place and the mapped
// result array was built only to be discarded.
testSet.forEach((sample) => (sample.inputs.shape = [1, 28, 28]));

/**
 * Returns the index of the largest value in the tensor's flat data
 * buffer, or -1 when the buffer is empty.
 */
function argmax(mat: Tensor<Rank>) {
  let bestIndex = -1;
  let bestValue = -Infinity;
  mat.data.forEach((value, position) => {
    if (value > bestValue) {
      bestValue = value;
      bestIndex = position;
    }
  });
  return bestIndex;
}

// Count how many test samples the network classifies correctly.
// Predictions run sequentially; the backend is awaited one sample
// at a time.
let correct = 0;
for (const test of testSet) {
  // Predicted class = index of the largest network output.
  const prediction = argmax(await network.predict(test.inputs as Tensor<Rank>));
  // Expected class = index of the largest value in the label tensor.
  const expected = argmax(test.outputs as Tensor<Rank>);
  if (expected === prediction) {
    correct += 1;
  }
}

console.log(`${correct} / ${testSet.length} correct`);
console.log(
  `accuracy: ${((correct / testSet.length) * 100).toFixed(2)}%`,
);
36 changes: 1 addition & 35 deletions examples/mnist/train.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,12 @@ import {
FlattenLayer,
Init,
MaxPool2DLayer,
Rank,
ReluLayer,
Sequential,
setupBackend,
SoftmaxLayer,
Tensor,
} from "../../mod.ts";

import { loadDataset } from "./common.ts";

await setupBackend(CPU);
Expand Down Expand Up @@ -46,36 +45,3 @@ console.log("Training (" + epochs + " epochs)...");
const start = performance.now();
network.train(trainSet, epochs, 32, 0.01);
console.log("Training complete!", performance.now() - start);

// predicting

const testSet = loadDataset("test-images.idx", "test-labels.idx", 0, 1000);
testSet.map((_, i) => (testSet[i].inputs.shape = [1, 28, 28]));

function argmax(mat: Tensor<Rank>) {
let max = -Infinity;
let index = -1;
for (let i = 0; i < mat.data.length; i++) {
if (mat.data[i] > max) {
max = mat.data[i];
index = i;
}
}
return index;
}

let correct = 0;
for (const test of testSet) {
const prediction = argmax(await network.predict(test.inputs as Tensor<Rank>));
const expected = argmax(test.outputs as Tensor<Rank>);
if (expected === prediction) {
correct += 1;
}
}

console.log(`${correct} / ${testSet.length} correct`);
console.log(
`accuracy: ${((correct / testSet.length) * 100).toFixed(2)}%`,
);

network.saveFile("examples/mnist/mnist.test.st");
46 changes: 46 additions & 0 deletions examples/mnist/train_wasm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import {
  Conv2DLayer,
  Cost,
  DenseLayer,
  FlattenLayer,
  Init,
  MaxPool2DLayer,
  ReluLayer,
  Sequential,
  setupBackend,
  SoftmaxLayer,
  WASM,
} from "../../mod.ts";
import { loadDataset } from "./common.ts";

// Train on the WASM backend.
await setupBackend(WASM);

// training
// Convolutional classifier for 28x28 single-channel MNIST digits.
// size is [batch?, channels, height, width] — TODO confirm the
// leading 1 is the batch dimension.
const network = new Sequential({
  size: [1, 1, 28, 28],
  layers: [
    // 6 filters of 5x5 over 1 channel; padding 2 keeps 28x28.
    Conv2DLayer({ kernelSize: [6, 1, 5, 5], padding: [2, 2] }),
    ReluLayer(),
    // 2x2 stride pooling: 28x28 -> 14x14.
    MaxPool2DLayer({ strides: [2, 2] }),
    // 16 filters of 5x5, no padding: 14x14 -> 10x10.
    Conv2DLayer({ kernelSize: [16, 6, 5, 5] }),
    ReluLayer(),
    // 10x10 -> 5x5.
    MaxPool2DLayer({ strides: [2, 2] }),
    // 120 filters of 5x5 over the 5x5 maps: collapses to 1x1x120.
    Conv2DLayer({ kernelSize: [120, 16, 5, 5] }),
    ReluLayer(),
    // Flatten the 120 feature maps into a single vector.
    FlattenLayer({ size: [120] }),
    DenseLayer({ size: [84], init: Init.Kaiming }),
    ReluLayer(),
    // 10 output classes (digits 0-9), softmax-normalized.
    DenseLayer({ size: [10], init: Init.Kaiming }),
    SoftmaxLayer(),
  ],
  cost: Cost.CrossEntropy,
});

console.log("Loading training dataset...");
// First 5000 training samples.
const trainSet = loadDataset("train-images.idx", "train-labels.idx", 0, 5000);

// Train for one epoch with batch size 32 and learning rate 0.01.
const epochs = 1;
console.log("Training (" + epochs + " epochs)...");
const start = performance.now();
network.train(trainSet, epochs, 32, 0.01);
console.log("Training complete!", performance.now() - start);
96 changes: 96 additions & 0 deletions examples/xor_auto.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
/**
 * This example shows how to train a neural network to predict the output of the XOR function.
 */

import {
  Cost,
  AUTO,
  DenseLayer,
  Sequential,
  setupBackend,
  SigmoidLayer,
  tensor1D,
  tensor2D,
} from "../mod.ts";

/**
 * Setup the AUTO backend. This backend is chosen automatically based on the environment.
 */
await setupBackend(AUTO);

/**
 * Creates a sequential neural network.
 */
const net = new Sequential({
  /**
   * The number of minibatches is set to 4 and the input size is set to 2.
   */
  size: [4, 2],

  /**
   * The silent option is set to true, which means that the network will not output any logs during training.
   */
  silent: true,

  /**
   * Defines the layers of a neural network in the XOR function example.
   * The neural network has two input neurons and one output neuron.
   * The layers are defined as follows:
   * - A dense layer with 3 neurons.
   * - A sigmoid activation layer.
   * - A dense layer with 1 neuron.
   * - A sigmoid activation layer.
   */
  layers: [
    DenseLayer({ size: [3] }),
    SigmoidLayer(),
    DenseLayer({ size: [1] }),
    SigmoidLayer(),
  ],

  /**
   * The cost function used for training the network is the mean squared error (MSE).
   */
  cost: Cost.MSE,
});

const time = performance.now();

/**
 * Train the network on the given data.
 */
net.train(
  [
    {
      inputs: tensor2D([
        [0, 0],
        [1, 0],
        [0, 1],
        [1, 1],
      ]),
      outputs: tensor2D([[0], [1], [1], [0]]),
    },
  ],
  /**
   * The number of iterations is set to 10000.
   */
  10000,
);

console.log(`training time: ${performance.now() - time}ms`);

/**
 * Predict the output of the XOR function for the given inputs.
 */
const out1 = (await net.predict(tensor1D([0, 0]))).data;
console.log(`0 xor 0 = ${out1[0]} (should be close to 0)`);

const out2 = (await net.predict(tensor1D([1, 0]))).data;
console.log(`1 xor 0 = ${out2[0]} (should be close to 1)`);

const out3 = (await net.predict(tensor1D([0, 1]))).data;
console.log(`0 xor 1 = ${out3[0]} (should be close to 1)`);

const out4 = (await net.predict(tensor1D([1, 1]))).data;
console.log(`1 xor 1 = ${out4[0]} (should be close to 0)`);

Loading

0 comments on commit 2b7c5ca

Please sign in to comment.