ML5.js Samples

ML5.js machine learning examples for creative coding, image classification, pose detection, and neural networks

Key Facts

Category
Machine Learning
Items
4
Format Families
image, audio

Sample Overview

ML5.js machine learning examples for creative coding, image classification, pose detection, and neural networks. This sample set belongs to Machine Learning and can be used to test related workflows inside Elysia Tools.

💻 Image Classification with ML5.js javascript

🟡 intermediate ⭐⭐⭐

Classify images using pre-trained MobileNet model

⏱️ 20 min 🏷️ ml5js, image classification, mobilenet
Prerequisites: Basic JavaScript, HTML5 Canvas
// Image Classification with ML5.js

// Import ML5.js
// In browser: <script src="https://unpkg.com/ml5@latest/dist/ml5.min.js"></script>
// HTML: <div id="videoContainer"></div><canvas id="canvas"></canvas>

let model; // ml5 image classifier; set by loadModel(), undefined until MobileNet is ready
let video; // hidden p5 webcam capture; set by setupVideo(), consumed as classifier input

// 1. Setup video capture
// Starts the p5 webcam capture at 640x480 and hides its default DOM
// element; the feed is consumed by the classifier rather than shown.
function setupVideo() {
    video = createCapture(VIDEO);
    video.size(640, 480);
    video.hide();
}

// 2. Load MobileNet model
// Creates the MobileNet classifier bound to the webcam feed and kicks
// off the continuous classification loop once loading completes.
async function loadModel() {
    console.log('Loading MobileNet model...');

    const onModelReady = () => {
        console.log('Model loaded successfully!');
        classifyVideo();
    };

    model = await ml5.imageClassifier('MobileNet', video, onModelReady);
}

// 3. Classify video frame
// Classifies the current frame; on success renders the labels and then
// queues the next classification (self-perpetuating loop).
function classifyVideo() {
    const handleFrame = (error, results) => {
        if (error) {
            console.error(error);
            return;
        }

        displayResults(results); // render labels + confidence bars
        classifyVideo();         // keep the loop going
    };

    model.classify(handleFrame);
}

// 4. Display classification results
// Logs each result and renders it into #results as a label plus an
// animated confidence bar; a no-op when the container is absent.
function displayResults(results) {
    console.log('Classification Results:');

    // Clear previous results
    const container = document.getElementById('results');
    if (!container) {
        return;
    }
    container.innerHTML = '';

    results.forEach((entry, rank) => {
        const pct = (entry.confidence * 100).toFixed(2);

        console.log(`${rank + 1}. ${entry.label}: ${pct}%`);

        // One row per result: label, bar, percentage readout.
        const row = document.createElement('div');
        row.className = 'classification-result';
        row.innerHTML = `
                <span class="label">${entry.label}</span>
                <div class="confidence-bar">
                    <div class="confidence-fill" style="width: ${pct}%"></div>
                </div>
                <span class="confidence-text">${pct}%</span>
            `;
        container.appendChild(row);
    });
}

// 5. Alternative: Classify still image
// Classifies a static <img> element and logs each label/confidence pair.
function classifyImage(imageElement) {
    const report = (error, results) => {
        if (error) {
            console.error(error);
            return;
        }

        console.log('Image Classification Results:');
        results.forEach((entry, rank) => {
            const pct = (entry.confidence * 100).toFixed(2);
            console.log(`${rank + 1}. ${entry.label}: ${pct}%`);
        });
    };

    model.classify(imageElement, report);
}

// 6. Classify from canvas
// Snapshots the live video into #canvas, then classifies the canvas
// pixels and logs the raw result list.
function classifyCanvas() {
    const canvas = document.getElementById('canvas');
    const ctx = canvas.getContext('2d');

    // Copy the current video frame onto the canvas first.
    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);

    model.classify(canvas, (error, results) => {
        if (error) {
            console.error(error);
            return;
        }
        console.log('Canvas Classification Results:', results);
    });
}

// 7. Custom classification with confidence threshold
// Classifies the current frame but only reports labels whose confidence
// strictly exceeds `threshold` (default 0.5).
function classifyWithThreshold(threshold = 0.5) {
    model.classify((error, results) => {
        if (error) {
            console.error(error);
            return;
        }

        const confident = results.filter(({ confidence }) => confidence > threshold);

        if (confident.length === 0) {
            console.log('No results above confidence threshold');
            return;
        }

        console.log('High Confidence Results:');
        for (const { label, confidence } of confident) {
            console.log(`${label}: ${(confidence * 100).toFixed(2)}%`);
        }
    });
}

// 8. Batch classification of multiple images
// Sequentially classifies a list of image URLs, logging the results per
// image. Each image loads cross-origin and is wrapped in a Promise so
// the callback-style ml5 API can be awaited.
async function batchClassify(imageUrls) {
    console.log('Starting batch classification...');

    // Load one URL into an <img> and resolve with its classification.
    const classifyUrl = (url) =>
        new Promise((resolve, reject) => {
            const img = new Image();
            img.crossOrigin = 'anonymous';
            img.onload = () => {
                model.classify(img, (err, res) => (err ? reject(err) : resolve(res)));
            };
            img.onerror = reject;
            img.src = url;
        });

    for (const [i, imageUrl] of imageUrls.entries()) {
        console.log(`\nClassifying image ${i + 1}: ${imageUrl}`);

        try {
            const results = await classifyUrl(imageUrl);

            console.log('Results:');
            results.forEach((entry, rank) => {
                console.log(`  ${rank + 1}. ${entry.label}: ${(entry.confidence * 100).toFixed(2)}%`);
            });
        } catch (error) {
            console.error(`Error classifying image ${i + 1}:`, error);
        }
    }
}

// 9. Classification with custom visualization
//
// Builds a p5-style draw callback that renders the video and, every 30
// frames, overlays the top classification label.
//
// BUG FIX: the original defined `draw` but never called or exposed it,
// so setupVisualization() had no observable effect. It now returns the
// draw function so a caller (e.g. a p5 sketch) can invoke it each
// frame. Returning a value is backward compatible: previous callers
// ignored the (undefined) return value.
function setupVisualization() {
    function draw() {
        // Draw video
        image(video, 0, 0, width, height);

        // Get current classification
        if (model && frameCount % 30 === 0) { // Classify every 30 frames
            model.classify((error, results) => {
                if (!error && results.length > 0) {
                    // Draw top result on screen
                    fill(255, 255, 255);
                    stroke(0);
                    strokeWeight(3);
                    textSize(24);
                    textAlign(LEFT, TOP);

                    const topResult = results[0];
                    const confidence = (topResult.confidence * 100).toFixed(1);
                    const labelText = `${topResult.label} (${confidence}%)`;
                    text(labelText, 10, 10);
                }
            });
        }
    }

    return draw;
}

// 10. Export classification data
// Classifies the current frame and downloads the results as a
// timestamped JSON file via a synthetic <a download> click.
function exportClassificationData() {
    model.classify((error, results) => {
        if (error) {
            console.error(error);
            return;
        }

        // Shape the payload: raw confidence plus a formatted percentage.
        const payload = {
            timestamp: new Date().toISOString(),
            results: results.map(({ label, confidence }) => ({
                label,
                confidence,
                confidencePercent: (confidence * 100).toFixed(2)
            }))
        };

        // Download as JSON via a data: URI.
        const json = JSON.stringify(payload, null, 2);
        const href = 'data:application/json;charset=utf-8,' + encodeURIComponent(json);
        const fileName = `classification_${Date.now()}.json`;

        const anchor = document.createElement('a');
        anchor.setAttribute('href', href);
        anchor.setAttribute('download', fileName);
        anchor.click();

        console.log('Classification data exported');
    });
}

// Complete initialization
// Ensures the #results overlay panel and its CSS exist, then starts the
// webcam capture and loads the classifier model.
async function initializeImageClassification() {
    console.log('Initializing Image Classification...');

    // Create the overlay panel only if it is not already on the page.
    if (!document.getElementById('results')) {
        const panel = document.createElement('div');
        panel.id = 'results';
        Object.assign(panel.style, {
            position: 'absolute',
            top: '10px',
            right: '10px',
            background: 'rgba(0, 0, 0, 0.7)',
            color: 'white',
            padding: '10px',
            borderRadius: '5px',
            fontFamily: 'Arial, sans-serif',
            minWidth: '250px'
        });
        document.body.appendChild(panel);

        // Add styles for confidence bars
        const style = document.createElement('style');
        style.textContent = `
            .classification-result {
                margin: 5px 0;
                display: flex;
                align-items: center;
            }
            .label {
                flex: 1;
                margin-right: 10px;
            }
            .confidence-bar {
                flex: 2;
                height: 20px;
                background: #333;
                border-radius: 10px;
                overflow: hidden;
                margin-right: 10px;
            }
            .confidence-fill {
                height: 100%;
                background: linear-gradient(90deg, #ff6b6b, #4ecdc4);
                transition: width 0.3s ease;
            }
            .confidence-text {
                min-width: 45px;
                text-align: right;
            }
        `;
        document.head.appendChild(style);
    }

    setupVideo();
    await loadModel();
}

// Example usage
// initializeImageClassification();

// For static image classification:
// const img = document.getElementById('myImage');
// classifyImage(img);

// For batch classification:
// const imageUrls = ['image1.jpg', 'image2.jpg', 'image3.jpg'];
// batchClassify(imageUrls);

// Startup banner so the host page can confirm the module was evaluated.
console.log('ML5.js Image Classification module loaded');
console.log('Call initializeImageClassification() to start');

💻 Pose Detection with ML5.js javascript

🟡 intermediate ⭐⭐⭐

Detect human body poses using PoseNet model

⏱️ 25 min 🏷️ ml5js, pose detection, posenet, body tracking
Prerequisites: Basic JavaScript, p5.js
// Pose Detection with ML5.js

// Import ML5.js
// In browser: <script src="https://unpkg.com/ml5@latest/dist/ml5.min.js"></script>
// Requires p5.js for visualizations

let poseNet; // ml5 PoseNet wrapper; set by loadPoseNet()
let poses = []; // latest results pushed by PoseNet's 'pose' event
let video; // hidden p5 webcam capture
let skeletons = []; // NOTE(review): declared but never read or written in this module — candidate for removal

// 1. Setup video capture
// Starts the p5 webcam capture at 640x480 and hides its default DOM
// element; frames are drawn manually in drawPoses().
function setupPoseDetection() {
    video = createCapture(VIDEO);
    video.size(640, 480);
    video.hide();
}

// 2. Load PoseNet model
// Creates the PoseNet estimator bound to the webcam and subscribes to
// its 'pose' event, which continuously refreshes the module-level
// `poses` array. Note: this async function contains no await, so it
// resolves before the model is actually ready; readiness is signaled
// via the load callback.
async function loadPoseNet() {
    console.log('Loading PoseNet model...');

    const options = {
        architecture: 'MobileNetV1',
        imageScaleFactor: 0.3, // NOTE(review): legacy option; newer PoseNet builds may ignore it — verify against the ml5 version in use
        outputStride: 16, // lower values are more accurate but slower
        flipHorizontal: false,
        minConfidence: 0.5,
        maxPoseDetections: 5,
        scoreThreshold: 0.5,
        nmsRadius: 20,
        detectionType: 'single', // single-person mode despite maxPoseDetections: 5
        multiplier: 0.75
    };

    poseNet = ml5.poseNet(video, options, () => {
        console.log('PoseNet model loaded!');
    });

    // Listen for new poses
    poseNet.on('pose', (results) => {
        poses = results;
    });
}

// 3. Draw poses
// Renders the webcam frame, then overlays each detected pose's skeleton
// lines, keypoint dots, and keypoint names.
function drawPoses() {
    // Draw video
    image(video, 0, 0, width, height);

    for (const detection of poses) {
        const { pose } = detection;

        // Draw skeleton
        drawSkeleton(detection.skeleton);

        // Draw keypoints with their labels
        pose.keypoints.forEach((keypoint, idx) => {
            // Skip low-confidence keypoints entirely.
            if (keypoint.score <= 0.2) {
                return;
            }

            fill(255, 0, 0);
            noStroke();
            ellipse(keypoint.position.x, keypoint.position.y, 20, 20);

            // Keypoint name above the dot.
            fill(255);
            textAlign(CENTER, CENTER);
            textSize(10);
            text(getKeypointName(idx), keypoint.position.x, keypoint.position.y - 15);
        });
    }
}

// 4. Draw skeleton connections
// Draws a thick green line for every bone pair reported by PoseNet.
function drawSkeleton(skeleton) {
    stroke(0, 255, 0);
    strokeWeight(5);

    for (const [partA, partB] of skeleton) {
        line(partA.position.x, partA.position.y, partB.position.x, partB.position.y);
    }
}

// 5. Get keypoint name by index
// Maps a PoseNet keypoint index (0-16) to its canonical part name; any
// out-of-range index yields 'unknown'.
function getKeypointName(index) {
    const keypointNames = [
        'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar',
        'leftShoulder', 'rightShoulder', 'leftElbow', 'rightElbow',
        'leftWrist', 'rightWrist', 'leftHip', 'rightHip',
        'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle'
    ];
    const name = keypointNames[index];
    return name === undefined ? 'unknown' : name;
}

// 6. Calculate angle between three points
// Returns the interior angle at vertex b (degrees, 0-180) formed by the
// rays b->a and b->c.
function calculateAngle(a, b, c) {
    const rad = Math.atan2(c.y - b.y, c.x - b.x) - Math.atan2(a.y - b.y, a.x - b.x);
    const deg = Math.abs((rad * 180.0) / Math.PI);
    return deg > 180.0 ? 360 - deg : deg;
}

// 7. Get specific body angles
// Computes elbow (arm) and knee (leg) angles in degrees from a PoseNet
// pose. Keypoints with score <= 0.2 are treated as missing, and any
// angle depending on a missing keypoint is omitted from the result.
//
// BUG FIX: the original computed leg angles from hip-knee-WRIST; the
// knee angle must be hip-knee-ANKLE.
function getBodyAngles(pose) {
    const keypoints = pose.keypoints;

    // Resolve a named keypoint's position, or null when missing/low-confidence.
    const getKeyPoint = (name) => {
        const names = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar',
                      'leftShoulder', 'rightShoulder', 'leftElbow', 'rightElbow',
                      'leftWrist', 'rightWrist', 'leftHip', 'rightHip',
                      'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle'];
        const index = names.indexOf(name);
        return index >= 0 && keypoints[index].score > 0.2 ? keypoints[index].position : null;
    };

    const leftShoulder = getKeyPoint('leftShoulder');
    const leftElbow = getKeyPoint('leftElbow');
    const leftWrist = getKeyPoint('leftWrist');
    const rightShoulder = getKeyPoint('rightShoulder');
    const rightElbow = getKeyPoint('rightElbow');
    const rightWrist = getKeyPoint('rightWrist');
    const leftHip = getKeyPoint('leftHip');
    const rightHip = getKeyPoint('rightHip');
    const leftKnee = getKeyPoint('leftKnee');
    const rightKnee = getKeyPoint('rightKnee');
    const leftAnkle = getKeyPoint('leftAnkle');
    const rightAnkle = getKeyPoint('rightAnkle');

    const angles = {};

    // Elbow angles: shoulder-elbow-wrist.
    if (leftShoulder && leftElbow && leftWrist) {
        angles.leftArm = calculateAngle(leftShoulder, leftElbow, leftWrist);
    }
    if (rightShoulder && rightElbow && rightWrist) {
        angles.rightArm = calculateAngle(rightShoulder, rightElbow, rightWrist);
    }

    // Knee angles: hip-knee-ankle (previously used the wrist — see BUG FIX).
    if (leftHip && leftKnee && leftAnkle) {
        angles.leftLeg = calculateAngle(leftHip, leftKnee, leftAnkle);
    }
    if (rightHip && rightKnee && rightAnkle) {
        angles.rightLeg = calculateAngle(rightHip, rightKnee, rightAnkle);
    }

    return angles;
}

// 8. Detect specific poses or gestures
// Returns an array of named poses/gestures recognized in the given
// PoseNet pose (T-Pose, Hands Up, Arms Crossed, arm-angle checks).
function detectPose(pose) {
    const angles = getBodyAngles(pose);
    const byPart = (part) => pose.keypoints.find((kp) => kp.part === part);

    const leftShoulder = byPart('leftShoulder');
    const rightShoulder = byPart('rightShoulder');
    const leftWrist = byPart('leftWrist');
    const rightWrist = byPart('rightWrist');

    const detected = [];

    // "T-pose": average wrist height within 50px of average shoulder height.
    if (leftShoulder && rightShoulder && leftWrist && rightWrist) {
        const shoulderY = (leftShoulder.position.y + rightShoulder.position.y) / 2;
        const wristY = (leftWrist.position.y + rightWrist.position.y) / 2;

        if (Math.abs(shoulderY - wristY) < 50) {
            detected.push('T-Pose');
        }
    }

    // "Hands up": both wrists above their shoulders (smaller y = higher).
    if (leftShoulder && leftWrist && rightShoulder && rightWrist) {
        const leftUp = leftWrist.position.y < leftShoulder.position.y;
        const rightUp = rightWrist.position.y < rightShoulder.position.y;
        if (leftUp && rightUp) {
            detected.push('Hands Up');
        }
    }

    // "Arms crossed" (approximate).
    // NOTE(review): on an un-mirrored camera image the anatomical left wrist
    // normally appears at a larger x than the right, so this heuristic may
    // fire constantly — verify against the flipHorizontal setting.
    if (leftWrist && rightWrist && leftWrist.position.x > rightWrist.position.x) {
        detected.push('Arms Crossed');
    }

    // Exercise-style checks on the left elbow angle.
    if (angles.leftArm !== undefined) {
        if (angles.leftArm > 160) {
            detected.push('Left Arm Extended');
        }
        if (angles.leftArm < 90 && angles.leftArm > 70) {
            detected.push('Left Arm 90 Degrees');
        }
    }

    return detected;
}

// 9. Track pose over time
// Keeps a rolling window of per-frame pose snapshots (keypoints, joint
// angles, detected gestures) and derives simple exercise events from it.
class PoseTracker {
    constructor() {
        this.poseHistory = []; // newest entry last
        this.maxHistory = 30;  // rolling window size, in frames
    }

    // Record the first pose of the current frame (if any) and trim history.
    update(poses) {
        if (!poses || poses.length === 0) {
            return;
        }

        const [pose] = poses;
        this.poseHistory.push({
            timestamp: Date.now(),
            keypoints: pose.keypoints,
            angles: getBodyAngles(pose),
            detectedPoses: detectPose(pose)
        });

        while (this.poseHistory.length > this.maxHistory) {
            this.poseHistory.shift();
        }
    }

    // Most recent snapshot, or null when nothing has been recorded.
    getCurrentPose() {
        const n = this.poseHistory.length;
        return n === 0 ? null : this.poseHistory[n - 1];
    }

    // Last `frames` snapshots (default 10), oldest first.
    getRecentPoses(frames = 10) {
        return this.poseHistory.slice(-frames);
    }

    // Heuristic exercise detection over the last 20 frames: a left-elbow
    // sweep of more than 90 degrees is reported as an arm curl.
    detectExercise() {
        const recent = this.getRecentPoses(20);
        if (recent.length < 10) {
            return null;
        }

        const armAngles = recent
            .map((snapshot) => snapshot.angles.leftArm)
            .filter((angle) => angle !== undefined);
        if (armAngles.length === 0) {
            return null;
        }

        const sweep = Math.max(...armAngles) - Math.min(...armAngles);
        return sweep > 90 ? 'Arm Curl' : null;
    }
}

// 10. Pose visualization with exercise tracking
// Module-wide tracker instance, fed each frame by drawPoseVisualization().
const poseTracker = new PoseTracker();

// Renders the video + poses, records the frame in the tracker, and
// overlays detected gestures, the current exercise, and arm angles as
// HUD text.
function drawPoseVisualization() {
    // Draw video and poses, then feed the tracker.
    drawPoses();
    poseTracker.update(poses);

    if (poses.length === 0) {
        return;
    }

    const snapshot = poseTracker.getCurrentPose();

    // Gesture line (yellow).
    if (snapshot && snapshot.detectedPoses.length > 0) {
        fill(255, 255, 0);
        noStroke();
        textSize(24);
        textAlign(LEFT, TOP);
        text(`Detected: ${snapshot.detectedPoses.join(', ')}`, 10, 30);
    }

    // Exercise line (green).
    const exercise = poseTracker.detectExercise();
    if (exercise) {
        fill(0, 255, 0);
        textSize(32);
        text(`Exercise: ${exercise}`, 10, 70);
    }

    // Angle readout (white), stacked vertically.
    if (snapshot && snapshot.angles) {
        fill(255);
        textSize(16);
        let y = 110;

        if (snapshot.angles.leftArm !== undefined) {
            text(`Left Arm: ${snapshot.angles.leftArm.toFixed(1)}°`, 10, y);
            y += 20;
        }
        if (snapshot.angles.rightArm !== undefined) {
            text(`Right Arm: ${snapshot.angles.rightArm.toFixed(1)}°`, 10, y);
            y += 20;
        }
    }
}

// Initialize pose detection
// Entry point: starts the webcam, then creates PoseNet. Note that
// loadPoseNet() contains no await, so this resolves before the model
// has finished loading; readiness is logged by its callback.
async function initializePoseDetection() {
    console.log('Initializing Pose Detection...');
    setupPoseDetection();
    await loadPoseNet();
}

// Export functions
// CommonJS guard so the same file works in the browser (via globals)
// and under Node-style module loaders.
if (typeof module !== 'undefined' && module.exports) {
    module.exports = {
        initializePoseDetection,
        PoseTracker,
        calculateAngle,
        getBodyAngles,
        detectPose
    };
}

// Startup banner so the host page can confirm the module was evaluated.
console.log('ML5.js Pose Detection module loaded');
console.log('Call initializePoseDetection() to start');

// Example p5.js sketch structure:
/*
function setup() {
    createCanvas(640, 480);
    initializePoseDetection();
}

function draw() {
    drawPoseVisualization();
}
*/

💻 Neural Network Training with ML5.js javascript

🔴 complex ⭐⭐⭐⭐

Create and train custom neural networks for various tasks

⏱️ 30 min 🏷️ ml5js, neural network, training, deep learning
Prerequisites: Advanced JavaScript, Neural network basics
// Neural Network Training with ML5.js

// Import ML5.js
// In browser: <script src="https://unpkg.com/ml5@latest/dist/ml5.min.js"></script>

let nn; // current ml5 neural network instance (shared by all helpers below)
let trainingData = []; // accumulated {input, output} samples; note the create* helpers append to the same array
let isTraining = false; // guards predict() while nn.train() is running

// 1. Create a neural network for regression
// Builds a 1-in/1-out regression network and seeds it with 100 noisy
// samples of y = 2x + 1 over x in [0, 10).
function createRegressionNetwork() {
    console.log('Creating regression neural network...');

    nn = ml5.neuralNetwork({
        inputs: 1,
        outputs: 1,
        task: 'regression',
        debug: true,
        learningRate: 0.05
    });

    // Synthesize noisy points on the line y = 2x + 1.
    for (let i = 0; i < 100; i++) {
        const x = Math.random() * 10;
        const noise = (Math.random() - 0.5) * 0.5;
        trainingData.push({ input: [x], output: [2 * x + 1 + noise] });
    }

    nn.addData(trainingData);

    return nn;
}

// 2. Create a neural network for classification
// Builds a 2-in/3-out classifier and seeds it with 150 points drawn
// uniformly from three 3x3 square clusters, one per class.
function createClassificationNetwork() {
    console.log('Creating classification neural network...');

    nn = ml5.neuralNetwork({
        inputs: 2,
        outputs: 3,
        task: 'classification',
        debug: true,
        learningRate: 0.1
    });

    // Cluster corner offsets and one-hot labels, indexed by class id.
    const clusters = [
        { dx: 1, dy: 1, label: [1, 0, 0] }, // Class 0
        { dx: 5, dy: 5, label: [0, 1, 0] }, // Class 1
        { dx: 3, dy: 7, label: [0, 0, 1] }  // Class 2
    ];

    for (let i = 0; i < 150; i++) {
        const { dx, dy, label } = clusters[Math.floor(Math.random() * 3)];
        const x = Math.random() * 3 + dx;
        const y = Math.random() * 3 + dy;
        trainingData.push({ input: [x, y], output: [...label] });
    }

    nn.addData(trainingData);

    return nn;
}

// 3. Train the neural network
// Runs nn.train() with fixed hyperparameters, logging loss each epoch
// and invoking `callback` (if given) once training finishes.
function trainNetwork(callback) {
    console.log('Starting training...');
    isTraining = true;

    const whileTraining = (epoch, loss) => {
        console.log(`Epoch ${epoch}: Loss = ${loss.toFixed(4)}`);
    };

    const doneTraining = () => {
        console.log('Training complete!');
        isTraining = false;
        if (callback) callback();
    };

    nn.train({
        epochs: 50,
        batchSize: 16,
        validationSplit: 0.1,
        shuffle: true
    }, whileTraining, doneTraining);
}

// 4. Make predictions
// Classifies `input` with the trained network and logs each class's
// confidence; refuses (returns null) while training is in progress.
function predict(input) {
    if (isTraining) {
        console.log('Network is still training...');
        return null;
    }

    const logPrediction = (error, results) => {
        if (error) {
            console.error(error);
            return;
        }

        console.log('Prediction results:');
        results.forEach((result, i) => {
            console.log(`Class ${i}: ${result.label} (confidence: ${(result.confidence * 100).toFixed(2)}%)`);
        });
    };

    nn.classify(input, logPrediction);
}

// 5. Save and load model
// Persists the current network's weights/metadata under the name
// 'my-model' (in ml5 this triggers browser downloads).
function saveModel() {
    nn.save('my-model');
    console.log('Model saved!');
}

// Recreates the classification network shape and loads saved weights
// from `path`, invoking `callback` (if given) once loading completes.
function loadModel(path, callback) {
    nn = ml5.neuralNetwork({
        inputs: 2,
        outputs: 3,
        task: 'classification'
    });

    const onLoaded = () => {
        console.log('Model loaded successfully!');
        if (callback) callback();
    };

    nn.load(path, onLoaded);
}

// 6. Custom data generator
// Builds a noisy XOR dataset: the 4 canonical XOR rows followed by 100
// jittered copies of each (inputs and outputs perturbed by up to ±0.05),
// for a total of 4 + 4*100 = 404 samples.
//
// BUG FIX: the original pushed new samples into the same array it was
// iterating with forEach inside the outer loop, so the array doubled on
// every pass (4 * 2^100 samples) and effectively hung the page. The
// jitter is now always derived from the 4 canonical rows only.
function generateCustomData() {
    // Canonical XOR truth table.
    const baseSamples = [
        { input: [0, 0], output: [0] },
        { input: [0, 1], output: [1] },
        { input: [1, 0], output: [1] },
        { input: [1, 1], output: [0] }
    ];

    // Start with clean copies of the canonical rows.
    const xorData = baseSamples.map(({ input, output }) => ({
        input: [...input],
        output: [...output]
    }));

    // Add noise and generate more samples from the canonical rows only.
    for (let i = 0; i < 100; i++) {
        baseSamples.forEach((sample) => {
            xorData.push({
                input: sample.input.map((x) => x + (Math.random() - 0.5) * 0.1),
                output: sample.output.map((y) => y + (Math.random() - 0.5) * 0.1)
            });
        });
    }

    return xorData;
}

// 7. Advanced neural network for image classification
// Builds a 64-in/2-out classifier over synthetic 8x8 "images" and seeds
// it with 200 random vertical/horizontal bar patterns.
function createImageClassificationNetwork() {
    console.log('Creating image classification network...');

    nn = ml5.neuralNetwork({
        inputs: 64, // 8x8 grayscale images
        outputs: 2,  // 2 classes
        task: 'classification',
        debug: true,
        learningRate: 0.01,
        hiddenUnits: [32, 16] // Two hidden layers
    });

    // Generate simple image data: ~50/50 vertical vs horizontal bars.
    for (let i = 0; i < 200; i++) {
        const vertical = Math.random() > 0.5;
        nn.addData({
            input: generateImagePattern(vertical),
            output: vertical ? [1, 0] : [0, 1]
        });
    }

    return nn;
}

// 8. Generate image patterns
// Produces a flattened row-major 8x8 grayscale "image" (64 values
// clamped to [0, 1]) containing either a vertical or horizontal bar at
// coordinates 3-4, plus up to ±0.1 of noise per pixel.
function generateImagePattern(isVertical) {
    const size = 8;
    const pixels = [];

    for (let row = 0; row < size; row++) {
        for (let col = 0; col < size; col++) {
            // The bar occupies positions 3-4 along the chosen axis.
            const onBar = isVertical ? (col > 2 && col < 5) : (row > 2 && row < 5);
            const noisy = (onBar ? 1 : 0) + (Math.random() - 0.5) * 0.2;
            pixels.push(Math.max(0, Math.min(1, noisy)));
        }
    }

    return pixels;
}

// 9. Real-time data collection
// Accumulates labeled sequences of data points: start(label) opens a
// recording, addDataPoint() appends while recording, and stop() commits
// the recording as one {input, output} sample.
class DataCollector {
    constructor() {
        this.data = [];         // committed samples
        this.currentInput = []; // points of the in-progress recording
        this.isCollecting = false;
        this.currentLabel = 0;
    }

    // Begin recording points for the given label; discards any
    // uncommitted points from a previous recording.
    start(label) {
        this.currentLabel = label;
        this.isCollecting = true;
        this.currentInput = [];
        console.log(`Collecting data for label ${label}...`);
    }

    // Append one point; ignored unless a recording is active.
    addDataPoint(point) {
        if (!this.isCollecting) {
            return;
        }
        this.currentInput.push(point);
    }

    // Commit the recording (if non-empty) as a single sample, then reset.
    stop() {
        if (this.isCollecting && this.currentInput.length > 0) {
            this.data.push({
                input: this.currentInput,
                output: [this.currentLabel]
            });
            console.log(`Added ${this.currentInput.length} data points for label ${this.currentLabel}`);
        }
        this.isCollecting = false;
        this.currentInput = [];
    }

    // All committed samples (live reference, not a copy).
    getData() {
        return this.data;
    }

    // Drop every committed sample.
    clear() {
        this.data = [];
        console.log('Data cleared');
    }
}

// 10. Gesture recognition network
// Wraps an ml5 classification network plus a DataCollector to record,
// train on, and recognize hand-trajectory gestures.
class GestureRecognizer {
    constructor() {
        this.nn = null; // ml5 neural network; created in initialize()
        this.dataCollector = new DataCollector();
        this.gestures = ['wave', 'swipe_left', 'swipe_right', 'circle'];
        this.currentGesture = 0; // index into this.gestures
        this.isTraining = false;
    }

    // Build the network (30 inputs = 10 frames x 3 coordinates) and pull
    // in any previously saved training data.
    initialize() {
        const options = {
            inputs: 30, // 10 frames x 3 coordinates (x, y, z)
            outputs: this.gestures.length,
            task: 'classification',
            debug: true
        };

        this.nn = ml5.neuralNetwork(options);
        this.loadTrainingData();
    }

    // Begin recording samples for the gesture at `gestureIndex`.
    startCollecting(gestureIndex) {
        this.currentGesture = gestureIndex;
        this.dataCollector.start(gestureIndex);
        console.log(`Collecting gesture: ${this.gestures[gestureIndex]}`);
    }

    // Append one hand position (z defaults to 0) to the active recording.
    addHandPosition(x, y, z = 0) {
        this.dataCollector.addDataPoint([x, y, z]);
    }

    // Commit the recording and feed the collected samples to the network.
    // NOTE(review): getData() returns ALL samples ever collected, so
    // samples from earlier recordings are re-added on each call — verify
    // whether duplicate addData entries are intended.
    stopCollecting() {
        this.dataCollector.stop();
        const newData = this.dataCollector.getData();
        newData.forEach(sample => {
            this.nn.addData(sample);
        });
    }

    // Normalize the collected data and train for 100 epochs; logs loss
    // per epoch and clears isTraining when finished.
    train() {
        if (this.isTraining) return;

        this.isTraining = true;
        this.nn.normalizeData();

        const options = {
            epochs: 100,
            batchSize: 16
        };

        this.nn.train(options, (epoch, loss) => {
            console.log(`Training - Epoch ${epoch}: Loss = ${loss.toFixed(4)}`);
        }, () => {
            this.isTraining = false;
            console.log('Gesture model trained!');
        });
    }

    // Classify a flattened point sequence and log the top label.
    // NOTE(review): the `return topResult` below returns from the ml5
    // callback, not from recognize(), so callers always receive
    // undefined (or null while training); results are only observable
    // via the console.
    recognize(points) {
        if (this.isTraining) return null;

        this.nn.classify(points, (error, results) => {
            if (error) {
                console.error(error);
                return;
            }

            const topResult = results[0];
            const confidence = (topResult.confidence * 100).toFixed(2);
            console.log(`Gesture: ${topResult.label} (${confidence}% confidence)`);

            return topResult;
        });
    }

    // Load pre-existing training data if available.
    // This would typically load from a file or API (currently a stub
    // that only logs).
    loadTrainingData() {
        console.log('Loading existing training data...');
    }

    // Persist the trained model under the name 'gesture-model'.
    saveModel() {
        this.nn.save('gesture-model');
        console.log('Gesture model saved!');
    }
}

// Initialize and train example
// End-to-end demo: builds the cluster classifier, trains it, then logs
// a prediction for one probe point near each cluster's corner.
async function runNeuralNetworkExample() {
    console.log('=== Neural Network Example ===');

    // Create a classification network
    const network = createClassificationNetwork();

    // Train the network
    trainNetwork(() => {
        console.log('Making predictions...');

        // Test predictions
        predict([2, 2]);  // Should be class 0
        predict([6, 6]);  // Should be class 1
        predict([4, 8]);  // Should be class 2
    });
}

// Initialize gesture recognizer
// Shared instance; call gestureRecognizer.initialize() before use.
const gestureRecognizer = new GestureRecognizer();

// Export classes and functions
// CommonJS guard so the module also works outside the browser.
if (typeof module !== 'undefined' && module.exports) {
    module.exports = {
        createRegressionNetwork,
        createClassificationNetwork,
        DataCollector,
        GestureRecognizer,
        runNeuralNetworkExample
    };
}

// Startup banner listing the public entry points.
console.log('ML5.js Neural Network module loaded');
console.log('Available functions:');
console.log('- createRegressionNetwork()');
console.log('- createClassificationNetwork()');
console.log('- runNeuralNetworkExample()');
console.log('- GestureRecognizer class for gesture recognition');

💻 Sound Classification with ML5.js javascript

🔴 complex ⭐⭐⭐⭐

Classify sounds and audio using machine learning

⏱️ 25 min 🏷️ ml5js, sound classification, audio, speech
Prerequisites: Advanced JavaScript, Web Audio API
// Sound Classification with ML5.js

// Import ML5.js
// In browser: <script src="https://unpkg.com/ml5@latest/dist/ml5.min.js"></script>
// Also requires p5.js and p5.sound library

let soundClassifier; // ml5 SpeechCommands18w classifier; set by loadSoundClassifier()
let audioContext; // WebAudio context created in setupAudio()
let mic; // p5.AudioIn microphone input
let modelReady = false; // flips true once the classifier has loaded

// 1. Setup audio context and microphone
// Creates the WebAudio context (with the legacy webkit fallback) and
// starts the p5 microphone input.
// NOTE(review): browsers generally require a user gesture before audio
// may start — confirm this is invoked from a click/tap handler.
function setupAudio() {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();

    // Create audio input
    mic = new p5.AudioIn();
    mic.start();

    console.log('Audio setup complete');
}

// 2. Load pre-trained sound classification model
// Loads the 18-word SpeechCommands model and, once ready, begins
// continuous classification of the microphone stream.
async function loadSoundClassifier() {
    console.log('Loading sound classifier...');

    const onLoaded = () => {
        console.log('Sound classifier loaded!');
        modelReady = true;
        startClassification();
    };

    soundClassifier = ml5.soundClassifier('SpeechCommands18w', {
        probabilityThreshold: 0.75, // suppress detections below 75%
        overlapFactor: 0.5          // overlap between successive audio windows
    }, onLoaded);
}

// 3. Start continuous classification
// Begins streaming microphone audio into the classifier; gotResult is
// invoked repeatedly with fresh label/confidence lists. Bails out with
// a log message if called before the model has loaded.
function startClassification() {
    if (!modelReady) {
        console.log('Model not ready yet...');
        return;
    }

    // Get audio classification results
    soundClassifier.classify(gotResult);
}

// 4. Handle classification results
// Callback for soundClassifier.classify: logs every result, renders the
// list, and routes the top-confidence label to the command handler.
function gotResult(error, results) {
    if (error) {
        console.error(error);
        return;
    }
    if (!results || results.length === 0) {
        return;
    }

    console.log('Sound Classification Results:');
    results.forEach((entry, i) => {
        console.log(`${i + 1}. ${entry.label}: ${(entry.confidence * 100).toFixed(2)}%`);
    });

    displaySoundResults(results);    // visual feedback
    handleSoundCommand(results[0]);  // act on the top label
}

// 5. Display sound classification results
//
// Rebuilds the #soundResults panel with one labelled confidence bar per
// result. Silently does nothing if the panel is absent.
function displaySoundResults(results) {
    const container = document.getElementById('soundResults');
    if (!container) return;

    // Clear the previous render
    container.innerHTML = '';

    for (const { label, confidence } of results) {
        const pct = (confidence * 100).toFixed(1);
        const row = document.createElement('div');
        row.style.margin = '5px 0';
        // Labels come from the model's fixed vocabulary, interpolated into
        // a static bar-chart row template.
        row.innerHTML = `
                <span style="display: inline-block; width: 150px;">${label}:</span>
                <div style="display: inline-block; width: 200px; height: 20px; background: #ddd; border-radius: 10px; overflow: hidden; position: relative;">
                    <div style="width: ${pct}%; height: 100%; background: linear-gradient(90deg, #4ecdc4, #44a3a0);"></div>
                </div>
                <span style="margin-left: 10px;">${pct}%</span>
            `;
        container.appendChild(row);
    }
}

// 6. Handle voice commands
//
// Maps a recognized speech-command label to a page side effect
// (background color, scrolling, brightness filter). Detections below
// 70% confidence are ignored; unknown labels do nothing.
function handleSoundCommand(result) {
    const command = result.label.toLowerCase();
    const confidence = result.confidence;

    if (confidence < 0.7) return; // Only act on high confidence

    console.log(`Detected command: ${command} (${(confidence * 100).toFixed(1)}%)`);

    // Dispatch table: one handler per recognized command.
    const actions = {
        yes() {
            console.log('✓ Yes detected');
            document.body.style.backgroundColor = '#90EE90';
        },
        no() {
            console.log('✗ No detected');
            document.body.style.backgroundColor = '#FFB6C1';
        },
        up() {
            console.log('↑ Up command');
            window.scrollBy(0, -100);
        },
        down() {
            console.log('↓ Down command');
            window.scrollBy(0, 100);
        },
        left() {
            console.log('← Left command');
            window.scrollBy(-100, 0);
        },
        right() {
            console.log('→ Right command');
            window.scrollBy(100, 0);
        },
        stop() {
            console.log('⏹ Stop command');
            // Pause any ongoing action
        },
        go() {
            console.log('▶ Go command');
            // Resume or start action
        },
        on() {
            console.log('💡 On command');
            document.body.style.filter = 'brightness(1.2)';
        },
        off() {
            console.log('🌙 Off command');
            document.body.style.filter = 'brightness(0.8)';
        },
        zero() {
            console.log('0 Reset command');
            document.body.style.backgroundColor = '';
            document.body.style.filter = '';
        }
    };

    const action = actions[command];
    if (action) action();
}

// 7. Custom sound classification with training data
//
// Records raw microphone frequency spectra as labelled examples, feeds
// them to an ml5 transfer-learning classifier, trains it, and then
// classifies live audio continuously.
class CustomSoundClassifier {
    // NOTE(review): the feature extractor is 'MobileNet', an image model;
    // whether ml5 accepts flat audio-frequency arrays via addExample is
    // not shown here — confirm against the ml5 featureExtractor docs.
    constructor() {
        this.featureExtractor = ml5.featureExtractor('MobileNet');
        this.classifier = this.featureExtractor.classification();
        this.audioBuffer = [];     // normalized FFT frames accumulated while recording
        this.isRecording = false;  // true between startRecording() and stopRecording()
        this.isTraining = false;   // guards against overlapping train() calls
        this.labels = ['silence', 'clap', 'whistle', 'snap'];  // intended classes (informational only)
    }

    // Request microphone access and wire it through an AnalyserNode so
    // frequency data can be sampled once per animation frame.
    async initialize() {
        console.log('Initializing custom sound classifier...');

        // Setup audio capture
        this.stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        this.audioContext = new AudioContext();
        this.source = this.audioContext.createMediaStreamSource(this.stream);
        this.analyser = this.audioContext.createAnalyser();

        this.source.connect(this.analyser);
        this.analyser.fftSize = 2048;  // -> frequencyBinCount of 1024 bins per frame

        console.log('Custom classifier initialized');
    }

    // Begin accumulating audio features for `label` until stopRecording().
    startRecording(label) {
        this.currentLabel = label;
        this.isRecording = true;
        this.audioBuffer = [];  // discard any previous capture

        console.log(`Recording for label: ${label}`);

        this.captureAudioData();
    }

    // rAF loop: appends one normalized frequency frame per animation frame
    // while recording is active; the isRecording flag ends the loop.
    // NOTE(review): buffer length depends on recording duration, so
    // examples for the same label can have different sizes — verify the
    // classifier tolerates variable-length inputs.
    captureAudioData() {
        if (!this.isRecording) return;

        const bufferLength = this.analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        this.analyser.getByteFrequencyData(dataArray);

        // Convert to normalized features
        const features = Array.from(dataArray).map(x => x / 255);
        this.audioBuffer.push(...features);

        requestAnimationFrame(() => this.captureAudioData());
    }

    // End the capture loop and hand the collected buffer to the classifier
    // as a single labelled training example.
    stopRecording() {
        this.isRecording = false;
        console.log(`Stopped recording. Captured ${this.audioBuffer.length} features`);

        // Add to training data
        this.classifier.addExample(this.audioBuffer, this.currentLabel);
    }

    // Train on all collected examples; starts continuous prediction on
    // completion. Re-entrant calls while training are ignored.
    async train() {
        if (this.isTraining) return;

        this.isTraining = true;
        console.log('Training custom sound classifier...');

        const options = {
            epochs: 50,
            batchSize: 32
        };

        // NOTE(review): ml5 featureExtractor train() signatures vary across
        // versions — confirm it accepts (options, whileTraining, done).
        this.classifier.train(options, (epoch, loss) => {
            console.log(`Training - Epoch ${epoch}: Loss = ${loss.toFixed(4)}`);
        }, () => {
            this.isTraining = false;
            console.log('Training complete!');
            this.startPrediction();
        });
    }

    // Flip into prediction mode and start the continuous loop.
    startPrediction() {
        if (this.isTraining) return;

        this.isPredicting = true;
        this.predictContinuously();
    }

    // rAF loop: samples one frequency frame per animation frame, classifies
    // it, and reacts to any result above 70% confidence.
    predictContinuously() {
        if (!this.isPredicting) return;

        const bufferLength = this.analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        this.analyser.getByteFrequencyData(dataArray);

        const features = Array.from(dataArray).map(x => x / 255);

        this.classifier.classify(features, (error, results) => {
            if (!error && results.length > 0) {
                const topResult = results[0];
                if (topResult.confidence > 0.7) {
                    console.log(`Detected: ${topResult.label} (${(topResult.confidence * 100).toFixed(1)}%)`);
                    this.handleCustomSound(topResult);
                }
            }
        });

        requestAnimationFrame(() => this.predictContinuously());
    }

    // Brief visual reaction on <body> for each recognized custom sound;
    // each effect auto-resets via setTimeout.
    handleCustomSound(result) {
        // Custom actions for different sounds
        switch(result.label) {
            case 'clap':
                console.log('👏 Clap detected!');
                document.body.style.animation = 'pulse 0.5s';
                setTimeout(() => {
                    document.body.style.animation = '';
                }, 500);
                break;

            case 'whistle':
                console.log('🎵 Whistle detected!');
                document.body.style.color = '#FF6B6B';
                setTimeout(() => {
                    document.body.style.color = '';
                }, 1000);
                break;

            case 'snap':
                console.log('🤏 Snap detected!');
                document.body.style.transform = 'scale(1.05)';
                setTimeout(() => {
                    document.body.style.transform = '';
                }, 200);
                break;
        }
    }

    // Persist the trained model under the given name prefix.
    saveModel() {
        this.classifier.save('custom-sound-model');
        console.log('Custom sound model saved!');
    }

    // Load a previously saved model from `path` and start predicting.
    async loadModel(path) {
        await this.classifier.load(path);
        console.log('Custom sound model loaded!');
        this.startPrediction();
    }
}

// 8. Audio visualization
//
// p5 draw helper: renders concentric rings whose diameter tracks the
// current microphone level, plus a filled center circle scaled by the
// raw level. Skipped until the mic and model are ready.
function visualizeAudio() {
    if (!mic || !modelReady) return;

    // Map microphone amplitude (0..1) onto a base ring diameter
    const level = mic.getLevel();
    const diameter = map(level, 0, 1, 50, 300);

    push();
    translate(width / 2, height / 2);

    // Ten concentric outline rings, spaced 10px apart
    noFill();
    stroke(255, 100);
    strokeWeight(2);
    for (let ring = 0; ring < 10; ring++) {
        const size = diameter + ring * 10;
        ellipse(0, 0, size, size);
    }

    // Filled center circle reacts directly to the raw level
    fill(255, 150);
    noStroke();
    ellipse(0, 0, level * 200, level * 200);

    pop();
}

// 9. Volume-based interaction
//
// Reacts to microphone loudness: very loud input scales the page,
// moderate input tints the background, and quiet input restores both.
function volumeBasedControl() {
    if (!mic || !modelReady) return;

    const threshold = 0.1;
    const level = mic.getLevel();

    if (level <= threshold) {
        // Quiet — restore normal appearance
        document.body.style.transform = '';
        document.body.style.backgroundColor = '';
        return;
    }

    // Loud sound detected
    console.log(`Loud sound detected: ${(level * 100).toFixed(1)}%`);

    if (level > 0.3) {
        // Very loud - dramatic action
        document.body.style.transition = 'transform 0.1s';
        document.body.style.transform = 'scale(1.1)';
    } else if (level > 0.2) {
        // Moderate - subtle action
        document.body.style.transition = 'background-color 0.3s';
        document.body.style.backgroundColor = '#E8F4FF';
    }
}

// 10. Initialize everything
//
// Entry point: builds the floating results panel (once), injects the
// pulse keyframes used by the clap reaction, opens the microphone, and
// loads the sound classifier.
async function initializeSoundClassification() {
    console.log('Initializing Sound Classification...');

    // Build the fixed-position results panel only if it doesn't exist yet
    if (!document.getElementById('soundResults')) {
        const panel = document.createElement('div');
        panel.id = 'soundResults';
        Object.assign(panel.style, {
            position: 'fixed',
            top: '10px',
            right: '10px',
            background: 'rgba(0, 0, 0, 0.8)',
            color: 'white',
            padding: '15px',
            borderRadius: '5px',
            fontFamily: 'monospace',
            minWidth: '300px'
        });
        document.body.appendChild(panel);

        // Panel title, inserted above the result rows
        const heading = document.createElement('h3');
        heading.textContent = 'Sound Classification';
        heading.style.margin = '0 0 10px 0';
        heading.style.fontSize = '16px';
        panel.insertBefore(heading, panel.firstChild);
    }

    // Add pulse animation for claps
    const style = document.createElement('style');
    style.textContent = `
        @keyframes pulse {
            0% { transform: scale(1); }
            50% { transform: scale(1.05); }
            100% { transform: scale(1); }
        }
    `;
    document.head.appendChild(style);

    setupAudio();
    await loadSoundClassifier();
}

// Create custom classifier instance
// NOTE(review): this runs at module load time and the constructor calls
// ml5.featureExtractor immediately; in an environment without the ml5
// global (e.g. plain Node, where the module.exports branch below would
// run) this line will throw — confirm the intended load order.
const customSoundClassifier = new CustomSoundClassifier();

// Export classes and functions
// (CommonJS guard so the file also works as a plain browser script)
if (typeof module !== 'undefined' && module.exports) {
    module.exports = {
        initializeSoundClassification,
        CustomSoundClassifier,
        customSoundClassifier
    };
}

// Startup banner listing the public entry points.
console.log('ML5.js Sound Classification module loaded');
console.log('Available functions:');
console.log('- initializeSoundClassification()');
console.log('- CustomSoundClassifier class for custom training');

// Usage examples:
/*
// Initialize sound classification
initializeSoundClassification();

// Train custom classifier
customSoundClassifier.initialize().then(() => {
    customSoundClassifier.startRecording('clap');
    // Record clap sound...
    customSoundClassifier.stopRecording();

    customSoundClassifier.startRecording('whistle');
    // Record whistle...
    customSoundClassifier.stopRecording();

    // Train and start predicting
    customSoundClassifier.train();
});
*/