Neurex

These examples are also available in this repository. Feel free to clone the repository and try out these examples for yourself!

Code

Result

                            
// NOTE: the results may vary
// Regression example: train a feed-forward network on a housing dataset and
// report regression metrics on a held-out test set.
// NOTE: the results may vary
const {Neurex, CsvDataHandler, MinMaxScaler, Layers, split_dataset, Interpreter, RegressionMetrics} = require('neurex');

const csv = new CsvDataHandler();
const model = new Neurex();
const interpreter = new Interpreter();
const scaler = new MinMaxScaler();
const layer = new Layers();

const dataset = csv.read_csv('housing.csv');
// csv.tabularize(dataset); // to view

const adjusted_dataset = csv.removeColumns(["ZN", "CHAS"], dataset); // remove columns that you might not need
// csv.tabularize(adjusted_dataset); // to view

const formatted_dataset = csv.rowsToInt(adjusted_dataset); // converts string cell values to numbers

const extractedColumn = csv.extractColumn("MEDV",formatted_dataset); // MEDV is the regression target
// csv.tabularize(formatted_dataset); // check the formatted_dataset: it mutated and the MEDV column was removed. Try logging the extracted column.

// =========== normalization ============= //
// NOTE(review): the same scaler instance is re-fitted on the target below,
// overwriting the feature-scaling parameters — acceptable here because the
// predictions are never inverse-transformed back to original units.
scaler.fit(formatted_dataset);
const features = scaler.transform(formatted_dataset); // normalizes the features to values between 0 to 1
scaler.fit(extractedColumn);
const target = scaler.transform(extractedColumn); // normalizes the target values between 0 to 1

// ============ splitting the dataset into training and test sets ============ //
const {X_train, Y_train, X_test, Y_test} = split_dataset(features, target, 0.2); // 0.2 is the test size, so the remaining 0.8 (80%) belongs to the training sets

// ============ building the network ============ //

model.configure({
    optimizer: 'adam', // we'll use Adam optimizer
    learning_rate: 0.001,

    // these are other configurable options
    randMin: -0.011, // minimum range for initializing weights and biases
    randMax: 0.11 // maximum range for initializing weights and biases
});

// 11 input features remain after dropping ZN/CHAS and extracting MEDV;
// a single linear output unit for regression.
model.sequentialBuild([
    layer.inputShape({features: 11}),
    layer.connectedLayer("relu", 10),
    layer.connectedLayer("relu", 10),
    layer.connectedLayer("relu", 10),
    layer.connectedLayer("relu", 3),
    layer.connectedLayer("linear", 1),
]);
model.build();

model.train(X_train, Y_train, 'mse', 5000, 32); // train the network (presumably: loss, epochs, batch size — confirm against Neurex docs)
model.saveModel('housing'); // will be saved as housing.nrx
interpreter.loadSavedModel('housing.nrx');
const predictions = interpreter.predict(X_test);

RegressionMetrics(predictions, Y_test);
                            
                        

Code

Result

                            
// NOTE: the results may vary
// Binary-classification example: ionosphere dataset with a sigmoid output
// unit trained on binary cross-entropy loss.
// NOTE: the results may vary
const {Neurex, CsvDataHandler, MinMaxScaler, Interpreter, Layers, BinaryLabeling, split_dataset, ClassificationMetrics} = require('neurex');

const model = new Neurex();
const csv = new CsvDataHandler();
const scaler = new MinMaxScaler();
const interpreter = new Interpreter();
const layer = new Layers();

const dataset = csv.read_csv('ionosphere.csv');
const extracted_column = csv.extractColumn("y", dataset); // class-label column
const formatted_dataset = csv.getRowElements(20, csv.rowsToInt(dataset)); // keep the first 20 numeric columns per row
const labels = BinaryLabeling(extracted_column); // maps the two class labels to 0/1

const {X_train, Y_train, X_test, Y_test} = split_dataset(formatted_dataset, labels, 0.2); // 20% test split

// Fix: fit the scaler on the TRAINING data only, then reuse those parameters
// for the test set. The original example re-fitted the scaler on X_test,
// which scales the test set with its own min/max — that leaks test-set
// statistics and makes train and test features incomparable.
scaler.fit(X_train);
const normalized_X_train = scaler.transform(X_train);
const normalized_X_test = scaler.transform(X_test);

model.configure({
    learning_rate: 0.0001,
    optimizer: 'adam',
    randMin: -0.001, // minimum range for initializing weights and biases
    randMax: 0.001 // maximum range for initializing weights and biases
});

// 20 input features, one sigmoid output (probability of the positive class)
model.sequentialBuild([
    layer.inputShape({features: 20}),
    layer.connectedLayer("relu", 10),
    layer.connectedLayer("relu", 5),
    layer.connectedLayer("relu", 5),
    layer.connectedLayer("relu", 3),
    layer.connectedLayer("sigmoid", 1),
]);
model.build();

model.train(normalized_X_train, Y_train, "binary_cross_entropy",5000,12); // presumably: loss, epochs, batch size — confirm against Neurex docs
model.saveModel('ionosphere'); // saved as ionosphere.nrx
interpreter.loadSavedModel('ionosphere.nrx');
const predictions = interpreter.predict(normalized_X_test);
ClassificationMetrics(predictions, Y_test, 'binary', ["good", "bad"]);
                            
                        

Code

Result

                            
// NOTE: the results may vary
// Multi-class example: iris dataset with a softmax output layer trained on
// sparse categorical cross-entropy loss.
// NOTE: the results may vary
const {Neurex, CsvDataHandler, Interpreter, Layers, OneHotEncoded, split_dataset, ClassificationMetrics, IntegerLabeling} = require('neurex');

const model = new Neurex();
const csv = new CsvDataHandler();
const interpreter = new Interpreter();
const layer = new Layers();

const dataset = csv.read_csv('iris-dataset.csv');
const extract_column = csv.extractColumn('iris', dataset); // species column becomes the target
const features = csv.normalize('MinMax',csv.rowsToInt(dataset)); // convert cells to numbers, then MinMax-normalize in one step

const labels = IntegerLabeling(extract_column); // maps each species name to an integer class id
const {X_train, Y_train, X_test, Y_test} = split_dataset(features, labels, 0.2); // 20% held out for testing

model.configure({
    optimizer: 'adam',
    randMin: -0.1, // minimum range for initializing weights and biases
    randMax: 0.1, // maximum range for initializing weights and biases
    learning_rate: 0.0001
});

// 4 iris measurements in, 3 class scores out (softmax)
model.sequentialBuild([
    layer.inputShape({features: 4}),
    layer.connectedLayer("relu", 8),
    layer.connectedLayer("relu", 5),
    layer.connectedLayer("softmax", 3)
]);
model.build();

model.train(X_train, Y_train, "sparse_categorical_cross_entropy", 5000, 32); // presumably: loss, epochs, batch size — confirm against Neurex docs
model.saveModel('sparse_test3'); // saved as sparse_test3.nrx
interpreter.loadSavedModel('sparse_test3.nrx');
const predictions = interpreter.predict(X_test);

ClassificationMetrics(predictions, Y_test, "sparse_categorical", ["Iris-setosa", "Iris-versicolor", "Iris-virginica"]);
                            
                        

Code

Result

                            
// NOTE: the results may vary
// XOR example: the classic non-linearly-separable toy problem, solved with a
// single hidden layer and a sigmoid output unit.
// NOTE: the results may vary
const { Neurex, Layers } = require("neurex");

const model = new Neurex();
const layer = new Layers();

// 2 inputs -> 4 hidden ReLU units -> 1 sigmoid output
model.sequentialBuild([
    layer.inputShape({ features: 2 }),
    layer.connectedLayer("relu", 4),
    layer.connectedLayer("sigmoid", 1)
]);

model.build();

model.configure({
    optimizer: 'adam',
    learning_rate: 0.01
});

model.modelSummary();

// The full XOR truth table doubles as the training set.
const inputs = [
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1]
];

const targets = [
    [0],
    [1],
    [1],
    [0]
];

model.train(inputs, targets, 'binary_cross_entropy', 1000, 2);

const predictions = model.predict(inputs);

console.log("XOR Predictions:");
for (let i = 0; i < inputs.length; i += 1) {
    const input = inputs[i];
    const raw = predictions[i][0];
    // Threshold the sigmoid probability at 0.5 to obtain a hard 0/1 label.
    const predictedValue = raw > 0.5 ? 1 : 0;
    console.log(`Input: [${input}] -> Predicted: ${predictedValue} (Raw Output: ${raw.toFixed(4)}) | Actual: ${targets[i][0]}`);
}