🎯 Recommended Examples
Balanced sample collections from various categories for you to explore
OpenCV Computer Vision Examples
OpenCV.js examples for image processing, computer vision, and real-time video analysis in JavaScript
💻 OpenCV.js Basic Operations javascript
🟢 simple
⭐⭐
Getting started with OpenCV.js for image processing and basic computer vision tasks
⏱️ 30 min
🏷️ opencv, image processing, computer vision
Prerequisites:
Basic JavaScript, HTML Canvas, OpenCV concepts
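The snippets below assume a host page that provides the <img> and <canvas> elements they reference by id. A minimal scaffold (the ids and the test image path are assumptions; crossorigin is needed so cv.imread can read cross-origin pixels without tainting the canvas) might look like:
<!-- Minimal page scaffold (assumed; add one canvas per output you want to see) -->
<img id="inputImage" src="test.jpg" crossorigin="anonymous" hidden>
<canvas id="grayCanvas"></canvas>
<canvas id="blurCanvas"></canvas>
<canvas id="edgesCanvas"></canvas>
<script async src="https://docs.opencv.org/4.5.0/opencv.js" onload="onOpenCvReady();"></script>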
// OpenCV.js Basic Operations
// 1. Load OpenCV.js
// In HTML: <script async src="https://docs.opencv.org/4.5.0/opencv.js" onload="onOpenCvReady();"></script>
let cv;
function onOpenCvReady() {
cv = window.cv;
// For WASM builds, the script's onload can fire before the runtime is usable;
// wait for onRuntimeInitialized in that case
if (cv.Mat) {
console.log('OpenCV.js is ready');
runBasicExamples();
} else {
cv['onRuntimeInitialized'] = () => {
console.log('OpenCV.js is ready');
runBasicExamples();
};
}
}
// 2. Load and Display Image
function loadImageAndDisplay(imageElementId, canvasId) {
const imgElement = document.getElementById(imageElementId);
const canvasElement = document.getElementById(canvasId);
const src = cv.imread(imgElement);
cv.imshow(canvasId, src);
src.delete();
}
// 3. Basic Image Operations
function basicImageOperations() {
console.log('=== Basic Image Operations ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
// Create destination mat
const dst = new cv.Mat();
// Convert to grayscale
cv.cvtColor(src, dst, cv.COLOR_RGBA2GRAY, 0);
cv.imshow('grayCanvas', dst);
// Apply Gaussian blur
const blurred = new cv.Mat();
const ksize = new cv.Size(5, 5);
cv.GaussianBlur(dst, blurred, ksize, 0, 0, cv.BORDER_DEFAULT);
cv.imshow('blurCanvas', blurred);
// Edge detection with Canny
const edges = new cv.Mat();
cv.Canny(dst, edges, 50, 150, 3, false);
cv.imshow('edgesCanvas', edges);
// Clean up
src.delete();
dst.delete();
blurred.delete();
edges.delete();
}
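Every cv.Mat wraps WebAssembly heap memory that JavaScript's garbage collector never reclaims, which is why each function above pairs every allocation with delete(). A small wrapper (withMats/track are our own names, not an OpenCV API) makes that cleanup exception-safe:
// Minimal sketch of exception-safe Mat cleanup
function withMats(fn) {
const mats = [];
const track = (m) => { mats.push(m); return m; };
try {
return fn(track);
} finally {
mats.forEach(m => m.delete()); // runs even if fn throws
}
}
// Usage: every Mat registered via track() is freed automatically
withMats((track) => {
const src = track(cv.imread(document.getElementById('inputImage')));
const gray = track(new cv.Mat());
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
cv.imshow('grayCanvas', gray);
});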
// 4. Image Filtering and Enhancement
function imageFiltering() {
console.log('\n=== Image Filtering ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
// Histogram Equalization
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
const equalized = new cv.Mat();
cv.equalizeHist(gray, equalized);
cv.imshow('equalizedCanvas', equalized);
// Brightness and Contrast Adjustment
const adjusted = new cv.Mat();
src.convertTo(adjusted, -1, 1.2, 30); // alpha=1.2 (contrast), beta=30 (brightness)
cv.imshow('adjustedCanvas', adjusted);
// Sepia Tone Effect
// cv.transform needs the kernel width to match the channel count, so drop alpha first
const rgb = new cv.Mat();
cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB);
const sepia = new cv.Mat();
// Kernel rows follow the RGB channel order produced by cv.imread: R', G', B'
const kernel = cv.matFromArray(3, 3, cv.CV_32FC1, [
0.393, 0.769, 0.189,
0.349, 0.686, 0.168,
0.272, 0.534, 0.131
]);
cv.transform(rgb, sepia, kernel);
cv.imshow('sepiaCanvas', sepia);
// Clean up
src.delete();
gray.delete();
equalized.delete();
adjusted.delete();
rgb.delete();
sepia.delete();
kernel.delete();
}
// 5. Geometric Transformations
function geometricTransformations() {
console.log('\n=== Geometric Transformations ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
// Rotation
const rotated = new cv.Mat();
const center = new cv.Point(src.cols / 2, src.rows / 2);
const rotationMatrix = cv.getRotationMatrix2D(center, 45, 1); // 45 degrees
cv.warpAffine(src, rotated, rotationMatrix, src.size(), cv.INTER_LINEAR, cv.BORDER_CONSTANT, new cv.Scalar());
cv.imshow('rotatedCanvas', rotated);
// Scaling
const scaled = new cv.Mat();
const scaleFactor = 0.5;
const newSize = new cv.Size(src.cols * scaleFactor, src.rows * scaleFactor);
cv.resize(src, scaled, newSize, 0, 0, cv.INTER_AREA);
cv.imshow('scaledCanvas', scaled);
// Translation (shifting)
const translated = new cv.Mat();
const translationMatrix = cv.matFromArray(2, 3, cv.CV_64FC1, [
1, 0, 50, // shift right by 50
0, 1, 30 // shift down by 30
]);
cv.warpAffine(src, translated, translationMatrix, src.size());
cv.imshow('translatedCanvas', translated);
// Flipping
const flipped = new cv.Mat();
cv.flip(src, flipped, 1); // 0 = vertical, 1 = horizontal, -1 = both
cv.imshow('flippedCanvas', flipped);
// Clean up
src.delete();
rotated.delete();
scaled.delete();
translated.delete();
flipped.delete();
rotationMatrix.delete();
translationMatrix.delete();
}
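Note that the 45-degree rotation above clips the corners, because warpAffine keeps the source size. If the whole rotated image is needed, a common trick, sketched here under the same assumptions as the code above, is to enlarge the output and re-center the matrix:
// Sketch: rotate without cropping by growing the canvas and shifting the matrix
function rotateBound(src, angleDeg) {
const center = new cv.Point(src.cols / 2, src.rows / 2);
const M = cv.getRotationMatrix2D(center, angleDeg, 1); // 2x3 CV_64F
const cos = Math.abs(M.data64F[0]);
const sin = Math.abs(M.data64F[1]);
const newW = Math.round(src.rows * sin + src.cols * cos);
const newH = Math.round(src.rows * cos + src.cols * sin);
// Shift the translation terms so the result is centered in the new canvas
M.data64F[2] += newW / 2 - src.cols / 2;
M.data64F[5] += newH / 2 - src.rows / 2;
const dst = new cv.Mat();
cv.warpAffine(src, dst, M, new cv.Size(newW, newH), cv.INTER_LINEAR, cv.BORDER_CONSTANT, new cv.Scalar());
M.delete();
return dst; // caller must dst.delete()
}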
// 6. Morphological Operations
function morphologicalOperations() {
console.log('\n=== Morphological Operations ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Binary threshold
const binary = new cv.Mat();
cv.threshold(gray, binary, 127, 255, cv.THRESH_BINARY);
// Erosion
const eroded = new cv.Mat();
const kernel = cv.getStructuringElement(cv.MORPH_RECT, new cv.Size(3, 3));
cv.erode(binary, eroded, kernel);
cv.imshow('erodedCanvas', eroded);
// Dilation
const dilated = new cv.Mat();
cv.dilate(binary, dilated, kernel);
cv.imshow('dilatedCanvas', dilated);
// Opening (Erosion followed by Dilation)
const opened = new cv.Mat();
cv.morphologyEx(binary, opened, cv.MORPH_OPEN, kernel);
cv.imshow('openedCanvas', opened);
// Closing (Dilation followed by Erosion)
const closed = new cv.Mat();
cv.morphologyEx(binary, closed, cv.MORPH_CLOSE, kernel);
cv.imshow('closedCanvas', closed);
// Clean up
src.delete();
gray.delete();
binary.delete();
eroded.delete();
dilated.delete();
opened.delete();
closed.delete();
kernel.delete();
}
// 7. Face Detection
function faceDetection() {
console.log('\n=== Face Detection ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
const gray = new cv.Mat();
// Convert to grayscale
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Load face cascade classifier
// NOTE: load() reads from the Emscripten virtual filesystem, so the XML must be
// written there first (see the createFileFromUrl sketch after this function)
const faceCascade = new cv.CascadeClassifier();
faceCascade.load('haarcascade_frontalface_default.xml');
// Detect faces
const faces = new cv.RectVector();
faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);
// Draw rectangles around faces
for (let i = 0; i < faces.size(); ++i) {
const face = faces.get(i);
const point1 = new cv.Point(face.x, face.y);
const point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(src, point1, point2, [255, 0, 0, 255], 3);
}
cv.imshow('faceDetectionCanvas', src);
console.log(`Detected ${faces.size()} faces`);
// Clean up
src.delete();
gray.delete();
faces.delete();
faceCascade.delete();
}
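CascadeClassifier.load() reads from the Emscripten virtual filesystem, not from a URL, so the XML has to be fetched and written there before faceDetection() runs. A sketch of the helper the official OpenCV.js tutorials use for this (the fetch-based rewrite is ours; cv.FS_createDataFile is the real API):
// Fetch a cascade XML and register it in the Emscripten FS under `path`
async function createFileFromUrl(path, url) {
const response = await fetch(url);
if (!response.ok) throw new Error(`Failed to fetch ${url}`);
const data = new Uint8Array(await response.arrayBuffer());
cv.FS_createDataFile('/', path, data, true, false, false);
}
// Usage (run after OpenCV.js is ready and before load(); host the XML yourself):
// await createFileFromUrl('haarcascade_frontalface_default.xml',
//     '/models/haarcascade_frontalface_default.xml');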
// 8. Object Detection with Template Matching
function templateMatching() {
console.log('\n=== Template Matching ===');
const mainImageElement = document.getElementById('mainImage');
const templateElement = document.getElementById('templateImage');
const mainImage = cv.imread(mainImageElement);
const template = cv.imread(templateElement);
const grayMain = new cv.Mat();
const grayTemplate = new cv.Mat();
// Convert to grayscale
cv.cvtColor(mainImage, grayMain, cv.COLOR_RGBA2GRAY, 0);
cv.cvtColor(template, grayTemplate, cv.COLOR_RGBA2GRAY, 0);
// Template matching
const result = new cv.Mat();
cv.matchTemplate(grayMain, grayTemplate, result, cv.TM_CCOEFF_NORMED);
// Find best match
const minMax = cv.minMaxLoc(result);
const maxLoc = minMax.maxLoc;
const color = new cv.Scalar(255, 0, 0, 255);
// Draw rectangle around match
const point1 = maxLoc;
const point2 = new cv.Point(maxLoc.x + template.cols, maxLoc.y + template.rows);
cv.rectangle(mainImage, point1, point2, color, 2, cv.LINE_8, 0);
cv.imshow('templateMatchCanvas', mainImage);
console.log(`Template match found at (${maxLoc.x}, ${maxLoc.y}) with confidence ${minMax.maxVal.toFixed(4)}`);
// Clean up
mainImage.delete();
template.delete();
grayMain.delete();
grayTemplate.delete();
result.delete();
}
// 9. Color Detection and Segmentation
function colorDetection() {
console.log('\n=== Color Detection ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
const hsv = new cv.Mat();
// Convert to HSV color space
cv.cvtColor(src, hsv, cv.COLOR_RGBA2RGB);
cv.cvtColor(hsv, hsv, cv.COLOR_RGB2HSV);
// Define range for blue color (adjust as needed)
const lower = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [100, 50, 50, 0]);
const upper = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [130, 255, 255, 255]);
// Create mask
const mask = new cv.Mat();
cv.inRange(hsv, lower, upper, mask);
// Apply mask to original image
const result = new cv.Mat();
src.copyTo(result, mask);
cv.imshow('colorDetectionCanvas', result);
cv.imshow('colorMaskCanvas', mask);
// Find contours
const contours = new cv.MatVector();
const hierarchy = new cv.Mat();
cv.findContours(mask, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
// Draw contours
const contourImage = src.clone();
for (let i = 0; i < contours.size(); ++i) {
const color = new cv.Scalar(Math.random() * 255, Math.random() * 255, Math.random() * 255, 255);
cv.drawContours(contourImage, contours, i, color, 2, cv.LINE_8, hierarchy, 100);
}
cv.imshow('contoursCanvas', contourImage);
console.log(`Found ${contours.size()} blue objects`);
// Clean up
src.delete();
hsv.delete();
lower.delete();
upper.delete();
mask.delete();
result.delete();
contours.delete();
hierarchy.delete();
contourImage.delete();
}
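One gotcha when tuning ranges like [100, 130]: for 8-bit images OpenCV stores hue as 0-179 (degrees halved), so values from a standard 0-360 degree color picker must be divided by two. A tiny helper makes that explicit:
// Convert a 0-360 degree hue (e.g. from a color picker) to OpenCV's 0-179 scale
function hueToOpenCV(degrees) {
return Math.round((degrees % 360) / 2);
}
console.log(hueToOpenCV(240)); // 120 -> pure blue, inside the 100-130 range above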
// 10. Image Histogram Analysis
function histogramAnalysis() {
console.log('\n=== Histogram Analysis ===');
const imgElement = document.getElementById('inputImage');
const src = cv.imread(imgElement);
// cv.calcHist in OpenCV.js expects a MatVector source and computes one channel
// at a time, so work on a single grayscale channel here
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
const srcVec = new cv.MatVector();
srcVec.push_back(gray);
const histSize = [256];
const ranges = [0, 256];
const channels = [0];
const hist = new cv.Mat();
const mask = new cv.Mat(); // Empty mask = use the whole image
cv.calcHist(srcVec, channels, mask, hist, histSize, ranges);
// Create histogram visualization
const histW = 512;
const histH = 400;
const histImage = cv.Mat.zeros(histH, histW, cv.CV_8UC3);
// Normalize histogram to the canvas height
cv.normalize(hist, hist, 0, histImage.rows, cv.NORM_MINMAX);
// Draw histogram lines (hist is a 256x1 CV_32F Mat, read via data32F)
const binW = Math.round(histW / histSize[0]);
for (let i = 1; i < histSize[0]; i++) {
const h = Math.round(hist.data32F[i]);
const h1 = Math.round(hist.data32F[i - 1]);
cv.line(histImage, new cv.Point((i - 1) * binW, histH - h1),
new cv.Point(i * binW, histH - h),
new cv.Scalar(255, 0, 0, 255), 2, cv.LINE_8, 0);
}
cv.imshow('histogramCanvas', histImage);
// Calculate image statistics
const mean = cv.mean(src);
const stddev = new cv.Mat();
const meanMat = new cv.Mat();
cv.meanStdDev(src, meanMat, stddev);
console.log('Image Statistics:');
console.log(`Mean (R,G,B,A): (${mean[0].toFixed(2)}, ${mean[1].toFixed(2)}, ${mean[2].toFixed(2)}, ${mean[3].toFixed(2)})`);
// meanStdDev outputs CV_64F Mats, so read them via data64F
console.log(`Std Dev: (${stddev.data64F[0].toFixed(2)}, ${stddev.data64F[1].toFixed(2)}, ${stddev.data64F[2].toFixed(2)})`);
// Clean up
src.delete();
gray.delete();
srcVec.delete();
hist.delete();
mask.delete();
histImage.delete();
stddev.delete();
meanMat.delete();
}
// 11. Utility Functions
class OpenCVUtils {
static loadImageFromFile(file, callback) {
const reader = new FileReader();
reader.onload = function(e) {
const imgElement = document.createElement('img');
imgElement.onload = function() {
callback(imgElement);
};
imgElement.src = e.target.result;
};
reader.readAsDataURL(file);
}
static downloadCanvas(canvasId, filename) {
const canvas = document.getElementById(canvasId);
const link = document.createElement('a');
link.download = filename;
link.href = canvas.toDataURL();
link.click();
}
static getCanvasImageData(canvasId) {
const canvas = document.getElementById(canvasId);
const ctx = canvas.getContext('2d');
return ctx.getImageData(0, 0, canvas.width, canvas.height);
}
static performanceTimer(start) {
const end = performance.now();
return (end - start).toFixed(2) + 'ms';
}
static logMemoryUsage() {
if (performance.memory) {
console.log('Memory Usage:', {
used: (performance.memory.usedJSHeapSize / 1048576).toFixed(2) + ' MB',
total: (performance.memory.totalJSHeapSize / 1048576).toFixed(2) + ' MB',
limit: (performance.memory.jsHeapSizeLimit / 1048576).toFixed(2) + ' MB'
});
}
}
}
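Wiring these utilities to an actual file input could look like the following (the fileInput id is an assumption; the rest uses only functions defined above):
// Hypothetical wiring: process a user-selected file, then offer the result for download
document.getElementById('fileInput').addEventListener('change', (e) => {
const file = e.target.files[0];
if (!file) return;
OpenCVUtils.loadImageFromFile(file, (imgElement) => {
const src = cv.imread(imgElement);
cv.imshow('grayCanvas', src); // draw, then free the Mat
src.delete();
OpenCVUtils.downloadCanvas('grayCanvas', 'processed.png');
});
});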
// Main function to run all examples
function runBasicExamples() {
console.log('Starting OpenCV.js Basic Examples...');
// Wait for images to load
setTimeout(() => {
const startTime = performance.now();
try {
basicImageOperations();
imageFiltering();
geometricTransformations();
morphologicalOperations();
colorDetection();
histogramAnalysis();
console.log('\nAll basic examples completed successfully!');
console.log('Total execution time:', OpenCVUtils.performanceTimer(startTime));
OpenCVUtils.logMemoryUsage();
} catch (error) {
console.error('Error running examples:', error);
}
}, 1000);
}
// Export functions for external use
if (typeof module !== 'undefined' && module.exports) {
module.exports = {
loadImageAndDisplay,
basicImageOperations,
imageFiltering,
geometricTransformations,
morphologicalOperations,
faceDetection,
templateMatching,
colorDetection,
histogramAnalysis,
OpenCVUtils
};
}
console.log('OpenCV.js Basic Samples module loaded');
console.log('Make sure to include OpenCV.js in your HTML: <script async src="https://docs.opencv.org/4.5.0/opencv.js"></script>');
💻 Video Processing with OpenCV.js javascript
🟡 intermediate
⭐⭐⭐⭐
Real-time video processing, object tracking, and motion detection using WebRTC and OpenCV.js
⏱️ 40 min
🏷️ opencv, video, real-time, webcam
Prerequisites:
Advanced JavaScript, WebRTC, OpenCV basics
// Video Processing with OpenCV.js
let cv;
let streaming = false;
// 1. Initialize Video Capture
class VideoProcessor {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.ctx = this.canvas.getContext('2d');
this.cap = new cv.VideoCapture(this.video);
this.processingEnabled = true;
this.currentFilter = 'normal';
}
// Start video stream
async startVideo() {
try {
const stream = await navigator.mediaDevices.getUserMedia({
video: { width: 640, height: 480 },
audio: false
});
this.video.srcObject = stream;
this.video.play();
this.video.addEventListener('loadeddata', () => {
// cv.VideoCapture reads at the element's width/height attributes,
// so mirror the real stream size onto both the video and the canvas
this.video.width = this.video.videoWidth;
this.video.height = this.video.videoHeight;
this.canvas.width = this.video.videoWidth;
this.canvas.height = this.video.videoHeight;
streaming = true;
this.processVideo();
});
console.log('Video stream started successfully');
} catch (error) {
console.error('Error accessing camera:', error);
}
}
// Stop video stream
stopVideo() {
if (this.video.srcObject) {
this.video.srcObject.getTracks().forEach(track => track.stop());
this.video.srcObject = null;
streaming = false;
}
}
// Main processing loop
processVideo() {
if (!streaming || !this.processingEnabled) {
requestAnimationFrame(() => this.processVideo());
return;
}
// Capture frame from video
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
this.cap.read(src);
// Apply selected filter (each filter returns a new Mat; the default passes src through)
let dst;
switch (this.currentFilter) {
case 'grayscale':
dst = this.applyGrayscale(src);
break;
case 'edge':
dst = this.applyEdgeDetection(src);
break;
case 'blur':
dst = this.applyBlur(src);
break;
case 'cartoon':
dst = this.applyCartoonEffect(src);
break;
case 'motion':
dst = this.detectMotion(src);
break;
default:
dst = src;
}
// Display result
cv.imshow(this.canvas, dst);
// Clean up
if (dst !== src) dst.delete();
src.delete();
// Continue processing
requestAnimationFrame(() => this.processVideo());
}
// Apply grayscale filter
applyGrayscale(src) {
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
const dst = new cv.Mat();
cv.cvtColor(gray, dst, cv.COLOR_GRAY2RGBA);
gray.delete();
return dst;
}
// Apply edge detection
applyEdgeDetection(src) {
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
const edges = new cv.Mat();
cv.Canny(gray, edges, 50, 150);
const dst = new cv.Mat();
cv.cvtColor(edges, dst, cv.COLOR_GRAY2RGBA);
gray.delete();
edges.delete();
return dst;
}
// Apply blur filter
applyBlur(src) {
const dst = new cv.Mat();
const ksize = new cv.Size(15, 15);
cv.GaussianBlur(src, dst, ksize, 0, 0, cv.BORDER_DEFAULT);
return dst;
}
// Apply cartoon effect
applyCartoonEffect(src) {
// bilateralFilter only accepts 1- or 3-channel images, so drop the alpha channel first
const rgb = new cv.Mat();
cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB);
// Apply bilateral filter multiple times to flatten color regions
const filtered = new cv.Mat();
cv.bilateralFilter(rgb, filtered, 15, 80, 80, cv.BORDER_DEFAULT);
for (let i = 0; i < 2; i++) {
const temp = filtered.clone();
cv.bilateralFilter(temp, filtered, 15, 80, 80, cv.BORDER_DEFAULT);
temp.delete();
}
// Create bold edges from a smoothed grayscale copy
const gray = new cv.Mat();
cv.cvtColor(filtered, gray, cv.COLOR_RGB2GRAY);
cv.medianBlur(gray, gray, 5);
const adaptive = new cv.Mat();
cv.adaptiveThreshold(gray, adaptive, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY, 9, 2);
// Keep the flattened colors only where the edge mask is white
const masked = new cv.Mat();
cv.bitwise_and(filtered, filtered, masked, adaptive);
// Convert back to RGBA for display
const dst = new cv.Mat();
cv.cvtColor(masked, dst, cv.COLOR_RGB2RGBA);
// Clean up
rgb.delete();
filtered.delete();
gray.delete();
adaptive.delete();
masked.delete();
return dst;
}
// Motion detection
detectMotion(src) {
if (!this.previousFrame) {
this.previousFrame = new cv.Mat();
cv.cvtColor(src, this.previousFrame, cv.COLOR_RGBA2GRAY);
return src;
}
const gray = new cv.Mat();
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
// Calculate difference
const diff = new cv.Mat();
cv.absdiff(gray, this.previousFrame, diff);
// Threshold
const thresh = new cv.Mat();
cv.threshold(diff, thresh, 25, 255, cv.THRESH_BINARY);
// Find contours
const contours = new cv.MatVector();
const hierarchy = new cv.Mat();
cv.findContours(thresh, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
// Draw rectangles around moving objects
const dst = src.clone();
for (let i = 0; i < contours.size(); ++i) {
const contour = contours.get(i);
const area = cv.contourArea(contour);
if (area > 500) { // Filter small movements
const rect = cv.boundingRect(contour);
const point1 = new cv.Point(rect.x, rect.y);
const point2 = new cv.Point(rect.x + rect.width, rect.y + rect.height);
cv.rectangle(dst, point1, point2, [0, 255, 0, 255], 2);
}
}
// Update previous frame
this.previousFrame.delete();
this.previousFrame = gray.clone();
// Clean up
gray.delete();
diff.delete();
thresh.delete();
contours.delete();
hierarchy.delete();
return dst;
}
// Set filter
setFilter(filterName) {
this.currentFilter = filterName;
console.log('Filter changed to:', filterName);
}
// Toggle processing
toggleProcessing() {
this.processingEnabled = !this.processingEnabled;
console.log('Processing enabled:', this.processingEnabled);
}
// Take snapshot
takeSnapshot() {
const dataURL = this.canvas.toDataURL('image/png');
const link = document.createElement('a');
link.download = `snapshot_${Date.now()}.png`;
link.href = dataURL;
link.click();
console.log('Snapshot saved');
}
}
// 2. Face Detection in Video
class FaceDetectionVideo {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.faceCascade = null;
this.eyeCascade = null;
this.detectionEnabled = true;
}
async initialize() {
try {
// Load cascade classifiers (both XML files must already be in the
// Emscripten virtual filesystem; see the loader sketch in the basic section)
this.faceCascade = new cv.CascadeClassifier();
this.faceCascade.load('haarcascade_frontalface_default.xml');
this.eyeCascade = new cv.CascadeClassifier();
this.eyeCascade.load('haarcascade_eye.xml');
console.log('Face detection models loaded');
return true;
} catch (error) {
console.error('Error loading face detection models:', error);
return false;
}
}
startDetection() {
const processFrame = () => {
if (!this.detectionEnabled) {
requestAnimationFrame(processFrame);
return;
}
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
const gray = new cv.Mat();
this.cap.read(src);
// Convert to grayscale
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Detect faces
const faces = new cv.RectVector();
this.faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);
// Draw rectangles around faces and detect eyes
for (let i = 0; i < faces.size(); ++i) {
const face = faces.get(i);
const point1 = new cv.Point(face.x, face.y);
const point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(src, point1, point2, [255, 0, 0, 255], 3);
// Detect eyes within face region
const faceROI = gray.roi(face);
const eyes = new cv.RectVector();
this.eyeCascade.detectMultiScale(faceROI, eyes, 1.1, 10, 0);
// Draw eye rectangles
for (let j = 0; j < eyes.size(); ++j) {
const eye = eyes.get(j);
const eyePoint1 = new cv.Point(
face.x + eye.x,
face.y + eye.y
);
const eyePoint2 = new cv.Point(
face.x + eye.x + eye.width,
face.y + eye.y + eye.height
);
cv.rectangle(src, eyePoint1, eyePoint2, [0, 255, 255, 255], 2);
}
faceROI.delete();
eyes.delete();
}
cv.imshow(this.canvas, src);
// Display face count
document.getElementById('faceCount').textContent = faces.size();
src.delete();
gray.delete();
faces.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
toggleDetection() {
this.detectionEnabled = !this.detectionEnabled;
console.log('Face detection enabled:', this.detectionEnabled);
}
}
// 3. Object Tracking
class ObjectTracker {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.tracker = null;
this.tracking = false;
this.roi = null;
}
initializeTracker(trackerType = 'KCF') {
// NOTE: the tracking module is not part of the stock opencv.js build;
// this class assumes a custom build that exposes the tracker bindings
const trackerTypes = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT'];
if (trackerTypes.indexOf(trackerType) === -1) {
console.error('Invalid tracker type');
return false;
}
try {
// CSRT is used regardless of the requested type, as it is the most reliable of the set
this.tracker = new cv.TrackerCSRT_create();
console.log(`${trackerType} tracker initialized`);
return true;
} catch (error) {
console.error('Error initializing tracker:', error);
return false;
}
}
selectROI(rect) {
// cv.selectROI is a desktop HighGUI helper that does not exist in OpenCV.js;
// the rectangle must come from your own UI (e.g. a mouse-drawn box on the canvas)
if (!this.tracker) {
alert('Please initialize tracker first');
return;
}
if (rect && rect.width > 0 && rect.height > 0) {
this.roi = rect;
this.startTracking();
}
}
startTracking() {
if (!this.tracker || !this.roi) return;
// Initialize tracker with the first frame and the ROI
// (cv.VideoCapture.read fills a preallocated RGBA Mat; it does not return one)
const frame = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
this.cap.read(frame);
this.tracker.init(frame, this.roi);
frame.delete();
this.tracking = true;
this.track();
}
track() {
if (!this.tracking) return;
const processFrame = () => {
if (!this.tracking) return;
const frame = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
this.cap.read(frame);
// Update tracker (the exact binding shape depends on the custom build;
// here update() is assumed to return a success flag and getRegion() the box)
const ok = this.tracker.update(frame);
if (ok) {
// Draw tracked object
const trackedRect = this.tracker.getRegion();
cv.rectangle(frame,
new cv.Point(trackedRect.x, trackedRect.y),
new cv.Point(trackedRect.x + trackedRect.width, trackedRect.y + trackedRect.height),
[0, 255, 0, 255], 3);
} else {
cv.putText(frame, 'Tracking failure detected', new cv.Point(100, 80),
cv.FONT_HERSHEY_SIMPLEX, 0.75, [0, 0, 255, 255], 2);
}
cv.imshow(this.canvas, frame);
frame.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
stopTracking() {
this.tracking = false;
console.log('Tracking stopped');
}
}
// 4. Motion Detection and Alert System
class MotionDetector {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.previousFrame = null;
this.motionThreshold = 25;
this.minContourArea = 500;
this.alertCallback = null;
this.detectionEnabled = true;
this.motionHistory = [];
this.maxHistoryLength = 10;
}
setAlertCallback(callback) {
this.alertCallback = callback;
}
startDetection() {
const processFrame = () => {
if (!this.detectionEnabled) {
requestAnimationFrame(processFrame);
return;
}
// cv.VideoCapture.read fills a preallocated RGBA Mat (a live stream has no EOF)
const frame = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
this.cap.read(frame);
const gray = new cv.Mat();
cv.cvtColor(frame, gray, cv.COLOR_RGBA2GRAY, 0);
cv.GaussianBlur(gray, gray, new cv.Size(21, 21), 0);
let motionDetected = false;
const output = frame.clone();
if (this.previousFrame) {
// Calculate difference
const diff = new cv.Mat();
cv.absdiff(gray, this.previousFrame, diff);
// Threshold
const thresh = new cv.Mat();
cv.threshold(diff, thresh, this.motionThreshold, 255, cv.THRESH_BINARY);
// Dilate to fill holes (free the kernel Mat afterwards to avoid a per-frame leak)
const dilated = new cv.Mat();
const kernel = cv.getStructuringElement(cv.MORPH_RECT, new cv.Size(7, 7));
cv.dilate(thresh, dilated, kernel);
kernel.delete();
// Find contours
const contours = new cv.MatVector();
const hierarchy = new cv.Mat();
cv.findContours(dilated, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
let totalMotionArea = 0;
for (let i = 0; i < contours.size(); ++i) {
const contour = contours.get(i);
const area = cv.contourArea(contour);
if (area > this.minContourArea) {
motionDetected = true;
totalMotionArea += area;
// Draw bounding rectangle
const rect = cv.boundingRect(contour);
cv.rectangle(output,
new cv.Point(rect.x, rect.y),
new cv.Point(rect.x + rect.width, rect.y + rect.height),
[0, 255, 0, 255], 2);
}
}
// Update motion history
this.motionHistory.push({
detected: motionDetected,
area: totalMotionArea,
timestamp: Date.now()
});
if (this.motionHistory.length > this.maxHistoryLength) {
this.motionHistory.shift();
}
// Trigger alert if significant motion
if (motionDetected && totalMotionArea > 5000) {
if (this.alertCallback) {
this.alertCallback({
detected: true,
area: totalMotionArea,
timestamp: Date.now()
});
}
// Draw alert text
cv.putText(output, 'MOTION DETECTED', new cv.Point(10, 30),
cv.FONT_HERSHEY_SIMPLEX, 1, [0, 0, 255, 255], 2);
}
// Display motion indicator
const motionLevel = Math.min(255, totalMotionArea / 100);
cv.rectangle(output, new cv.Point(10, output.rows - 40),
new cv.Point(10 + motionLevel, output.rows - 20),
[0, motionLevel, 255 - motionLevel, 255], -1);
// Clean up
diff.delete();
thresh.delete();
dilated.delete();
contours.delete();
hierarchy.delete();
}
// Update previous frame
if (this.previousFrame) {
this.previousFrame.delete();
}
this.previousFrame = gray.clone();
cv.imshow(this.canvas, output);
frame.delete();
output.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
getMotionStatistics() {
if (this.motionHistory.length === 0) {
return { motionFrequency: 0, averageMotionArea: 0 };
}
const recentMotion = this.motionHistory.slice(-5);
const motionCount = recentMotion.filter(m => m.detected).length;
const averageArea = recentMotion.reduce((sum, m) => sum + m.area, 0) / recentMotion.length;
return {
motionFrequency: (motionCount / recentMotion.length) * 100,
averageMotionArea: averageArea
};
}
}
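getMotionStatistics() is a pull-style API, so a caller has to poll it; a minimal sketch (element ids as elsewhere in this example):
// Poll motion statistics once per second and log them
const detector = new MotionDetector('videoInput', 'motionCanvas');
detector.startDetection();
setInterval(() => {
const stats = detector.getMotionStatistics();
console.log(`Motion frequency: ${stats.motionFrequency.toFixed(0)}%, ` +
`average area: ${stats.averageMotionArea.toFixed(0)} px`);
}, 1000);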
// 5. Video Recorder
class VideoRecorder {
constructor(canvasId) {
this.canvas = document.getElementById(canvasId);
this.mediaRecorder = null;
this.recordedChunks = [];
this.recording = false;
}
startRecording() {
if (this.recording) return;
const stream = this.canvas.captureStream(30); // 30 FPS
this.mediaRecorder = new MediaRecorder(stream, {
mimeType: 'video/webm'
});
this.recordedChunks = [];
this.mediaRecorder.ondataavailable = (event) => {
if (event.data.size > 0) {
this.recordedChunks.push(event.data);
}
};
this.mediaRecorder.onstop = () => {
const blob = new Blob(this.recordedChunks, {
type: 'video/webm'
});
const url = URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
link.download = `recording_${Date.now()}.webm`;
link.click();
URL.revokeObjectURL(url);
};
this.mediaRecorder.start();
this.recording = true;
console.log('Recording started');
}
stopRecording() {
if (!this.recording || !this.mediaRecorder) return;
this.mediaRecorder.stop();
this.recording = false;
console.log('Recording stopped');
}
isRecording() {
return this.recording;
}
}
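Hard-coding 'video/webm' fails on browsers that prefer other containers (Safari, notably), so it is safer to probe support first; a sketch using the standard MediaRecorder.isTypeSupported check:
// Pick the first container/codec combination the current browser supports
function pickRecorderMimeType() {
const candidates = [
'video/webm;codecs=vp9',
'video/webm;codecs=vp8',
'video/webm',
'video/mp4'
];
return candidates.find(type => MediaRecorder.isTypeSupported(type)) || '';
}
// Usage: new MediaRecorder(stream, { mimeType: pickRecorderMimeType() })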
// 6. Initialize all components
function initializeVideoProcessing() {
console.log('Initializing Video Processing...');
// Wait for OpenCV to load (cv is only usable once the WASM runtime is up)
if (!window.cv || !window.cv.Mat) {
setTimeout(initializeVideoProcessing, 100);
return;
}
cv = window.cv;
// Initialize video processor
const videoProcessor = new VideoProcessor('videoInput', 'canvasOutput');
videoProcessor.startVideo();
// Set up filter buttons
document.querySelectorAll('.filter-btn').forEach(btn => {
btn.addEventListener('click', () => {
videoProcessor.setFilter(btn.dataset.filter);
});
});
// Set up snapshot button
document.getElementById('snapshotBtn').addEventListener('click', () => {
videoProcessor.takeSnapshot();
});
// Initialize face detection
const faceDetector = new FaceDetectionVideo('videoInput', 'faceCanvas');
faceDetector.initialize().then(success => {
if (success) {
faceDetector.startDetection();
}
});
// Initialize motion detector
const motionDetector = new MotionDetector('videoInput', 'motionCanvas');
motionDetector.setAlertCallback((alert) => {
console.log('Motion Alert:', alert);
document.getElementById('motionAlert').textContent =
`Motion detected! Area: ${alert.area.toFixed(0)} pixels`;
});
motionDetector.startDetection();
// Initialize video recorder
const videoRecorder = new VideoRecorder('canvasOutput');
document.getElementById('recordBtn').addEventListener('click', () => {
if (videoRecorder.isRecording()) {
videoRecorder.stopRecording();
document.getElementById('recordBtn').textContent = 'Start Recording';
} else {
videoRecorder.startRecording();
document.getElementById('recordBtn').textContent = 'Stop Recording';
}
});
console.log('Video processing initialized successfully');
}
// Export classes for external use
if (typeof module !== 'undefined' && module.exports) {
module.exports = {
VideoProcessor,
FaceDetectionVideo,
ObjectTracker,
MotionDetector,
VideoRecorder
};
}
// Auto-initialize when page loads
document.addEventListener('DOMContentLoaded', () => {
initializeVideoProcessing();
});
console.log('OpenCV.js Video Processing module loaded');
💻 Advanced OpenCV Applications javascript
🔴 complex
⭐⭐⭐⭐⭐
Advanced computer vision applications including AR effects, barcode scanning, and image stitching
⏱️ 50 min
🏷️ opencv, ar, barcode, panorama, 3d, gesture
Prerequisites:
Expert JavaScript, Computer Vision, 3D Graphics
// Advanced OpenCV.js Applications
let cv;
// 1. Augmented Reality Face Filters
class ARFaceFilters {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.faceCascade = null;
this.currentFilter = 'none';
this.overlayImage = null;
this.filterEnabled = true;
}
async initialize() {
try {
this.faceCascade = new cv.CascadeClassifier();
this.faceCascade.load('haarcascade_frontalface_default.xml');
// Load filter overlays
await this.loadFilterImages();
console.log('AR Face Filters initialized');
return true;
} catch (error) {
console.error('Error initializing AR filters:', error);
return false;
}
}
async loadFilterImages() {
const filters = {
sunglasses: 'path/to/sunglasses.png',
mustache: 'path/to/mustache.png',
hat: 'path/to/hat.png',
mask: 'path/to/mask.png'
};
// Preload filter images (would need actual image files)
for (const [name, path] of Object.entries(filters)) {
// This would load actual images in practice
console.log(`Loading filter: ${name}`);
}
}
applyARFilter() {
if (!this.filterEnabled) return;
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
const gray = new cv.Mat();
this.cap.read(src);
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Detect faces
const faces = new cv.RectVector();
this.faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);
// Apply filter to each detected face
for (let i = 0; i < faces.size(); ++i) {
const face = faces.get(i);
switch (this.currentFilter) {
case 'sunglasses':
this.applySunglasses(src, face);
break;
case 'mustache':
this.applyMustache(src, face);
break;
case 'hat':
this.applyHat(src, face);
break;
case 'mask':
this.applyMask(src, face);
break;
case 'beautify':
this.beautifyFace(src, face);
break;
}
}
cv.imshow(this.canvas, src);
// Clean up
src.delete();
gray.delete();
faces.delete();
}
applySunglasses(image, face) {
// cv.Rect needs integer coordinates
const glassesWidth = Math.round(face.width * 0.8);
const glassesHeight = Math.round(face.height * 0.3);
const glassesX = Math.round(face.x + face.width * 0.1);
const glassesY = Math.round(face.y + face.height * 0.3);
// Draw sunglasses (simplified as a filled dark band; a ROI is a view into the
// parent Mat, so writing to it modifies the frame directly)
const roi = image.roi(new cv.Rect(glassesX, glassesY, glassesWidth, glassesHeight));
roi.setTo([0, 0, 0, 255]);
roi.delete();
}
applyMustache(image, face) {
const mustacheWidth = face.width * 0.4;
const mustacheHeight = face.height * 0.1;
const mustacheX = face.x + face.width * 0.3;
const mustacheY = face.y + face.height * 0.6;
// Draw mustache
cv.rectangle(image,
new cv.Point(mustacheX, mustacheY),
new cv.Point(mustacheX + mustacheWidth, mustacheY + mustacheHeight),
[0, 0, 0, 255], -1);
}
applyHat(image, face) {
const hatWidth = face.width * 1.2;
const hatHeight = face.height * 0.4;
const hatX = face.x - face.width * 0.1;
const hatY = face.y - face.height * 0.3;
// Draw hat
cv.rectangle(image,
new cv.Point(hatX, hatY),
new cv.Point(hatX + hatWidth, hatY + hatHeight),
[128, 0, 128, 255], -1);
}
applyMask(image, face) {
const maskWidth = face.width * 0.9;
const maskHeight = face.height * 0.3;
const maskX = face.x + face.width * 0.05;
const maskY = face.y + face.height * 0.4;
// Draw medical mask
cv.rectangle(image,
new cv.Point(maskX, maskY),
new cv.Point(maskX + maskWidth, maskY + maskHeight),
[200, 200, 200, 255], -1);
}
beautifyFace(image, face) {
const roi = image.roi(face);
// bilateralFilter needs a 3-channel input and distinct src/dst Mats
const rgb = new cv.Mat();
cv.cvtColor(roi, rgb, cv.COLOR_RGBA2RGB);
const smoothed = new cv.Mat();
cv.bilateralFilter(rgb, smoothed, 15, 30, 30, cv.BORDER_DEFAULT);
// Slight brightness adjustment
smoothed.convertTo(smoothed, -1, 1.1, 10);
const out = new cv.Mat();
cv.cvtColor(smoothed, out, cv.COLOR_RGB2RGBA);
out.copyTo(roi); // write back into the frame through the ROI view
roi.delete();
rgb.delete();
smoothed.delete();
out.delete();
}
start() {
const process = () => {
if (this.filterEnabled) {
this.applyARFilter();
}
requestAnimationFrame(process);
};
process();
}
setFilter(filterName) {
this.currentFilter = filterName;
console.log('AR Filter changed to:', filterName);
}
toggleFilter() {
this.filterEnabled = !this.filterEnabled;
}
}
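The rectangles above are stand-ins for real PNG overlays. Once a filter image is loaded with cv.imread (yielding a CV_8UC4 Mat), its alpha channel can drive a masked copy onto the frame; a sketch (hard cutout, no soft blending; resize the overlay to the face rect first with cv.resize):
// Sketch: stamp an RGBA overlay onto the frame, using its alpha channel as the copy mask
function stampOverlay(frame, overlay, x, y) {
const roi = frame.roi(new cv.Rect(x, y, overlay.cols, overlay.rows));
const channels = new cv.MatVector();
cv.split(overlay, channels);
const alpha = channels.get(3); // copy only where the overlay is opaque
overlay.copyTo(roi, alpha);
alpha.delete();
channels.delete();
roi.delete();
}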
// 2. Barcode and QR Code Scanner
class BarcodeScanner {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.scanning = true;
this.detectedCodes = [];
this.onCodeDetected = null;
}
startScanning() {
const processFrame = () => {
if (!this.scanning) {
requestAnimationFrame(processFrame);
return;
}
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
const gray = new cv.Mat();
this.cap.read(src);
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Detect barcodes (simplified - would need proper barcode library)
const barcodes = this.detectBarcodes(gray);
// Draw bounding boxes
for (const barcode of barcodes) {
cv.rectangle(src,
new cv.Point(barcode.x, barcode.y),
new cv.Point(barcode.x + barcode.width, barcode.y + barcode.height),
[0, 255, 0, 255], 3);
cv.putText(src, barcode.data,
new cv.Point(barcode.x, barcode.y - 10),
cv.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0, 255], 2);
// Trigger callback if new code detected
if (!this.detectedCodes.includes(barcode.data)) {
this.detectedCodes.push(barcode.data);
if (this.onCodeDetected) {
this.onCodeDetected(barcode);
}
}
}
cv.imshow(this.canvas, src);
// Clean up
src.delete();
gray.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
detectBarcodes(grayImage) {
// Simplified barcode detection
// In practice, you would use a dedicated barcode/QR code library
const barcodes = [];
// Apply adaptive threshold
const binary = new cv.Mat();
cv.adaptiveThreshold(grayImage, binary, 255, cv.ADAPTIVE_THRESH_GAUSSIAN_C, cv.THRESH_BINARY, 11, 2);
// Find contours
const contours = new cv.MatVector();
const hierarchy = new cv.Mat();
cv.findContours(binary, contours, hierarchy, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE);
// Filter contours that look like barcodes
for (let i = 0; i < contours.size(); ++i) {
const contour = contours.get(i);
const rect = cv.boundingRect(contour);
const aspectRatio = rect.width / rect.height;
// Typical barcode aspect ratios and dimensions
if (aspectRatio > 2.0 && rect.width > 50 && rect.height > 10) {
barcodes.push({
x: rect.x,
y: rect.y,
width: rect.width,
height: rect.height,
data: `BARCODE_${Date.now()}_${i}` // Simulated barcode data
});
}
}
binary.delete();
contours.delete();
hierarchy.delete();
return barcodes;
}
setOnCodeDetected(callback) {
// A method named onCodeDetected would be shadowed by the instance property
// of the same name initialized in the constructor, so use a distinct setter
this.onCodeDetected = callback;
}
clearDetectedCodes() {
this.detectedCodes = [];
}
toggleScanning() {
this.scanning = !this.scanning;
}
}
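As the comments say, contour heuristics locate barcode-shaped regions but cannot decode them. Where real decoding is needed without an extra library, Chromium-based browsers expose the native BarcodeDetector API (feature-detect it; availability varies):
// Real decoding via the Shape Detection API, where the browser supports it
async function decodeWithNativeAPI(videoElement) {
if (!('BarcodeDetector' in window)) {
console.warn('BarcodeDetector not supported in this browser');
return [];
}
const detector = new BarcodeDetector({ formats: ['qr_code', 'ean_13', 'code_128'] });
const codes = await detector.detect(videoElement);
return codes.map(c => ({ data: c.rawValue, box: c.boundingBox }));
}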
// 3. Image Stitching and Panorama
class ImageStitcher {
constructor(canvasId) {
this.canvas = document.getElementById(canvasId);
this.images = [];
this.keypoints = [];
this.descriptors = [];
this.matcher = null;
}
addImage(imageElement) {
const mat = cv.imread(imageElement);
this.images.push(mat);
// Detect keypoints
const gray = new cv.Mat();
cv.cvtColor(mat, gray, cv.COLOR_RGBA2GRAY, 0);
const orb = new cv.ORB(1000);
const keypoints = new cv.KeyPointVector();
const descriptors = new cv.Mat();
const noMask = new cv.Mat();
orb.detectAndCompute(gray, noMask, keypoints, descriptors);
this.keypoints.push(keypoints);
this.descriptors.push(descriptors);
noMask.delete();
orb.delete();
gray.delete();
console.log(`Added image ${this.images.length} with ${keypoints.size()} keypoints`);
}
stitchImages() {
if (this.images.length < 2) {
console.error('Need at least 2 images for stitching');
return null;
}
// Match features between consecutive images
const matches = this.matchFeatures(
this.descriptors[0],
this.descriptors[1]
);
// Find homography
const homography = this.findHomography(
this.keypoints[0],
this.keypoints[1],
matches
);
// Warp images
const panorama = this.warpImages(this.images[0], this.images[1], homography);
// Display result
cv.imshow(this.canvas, panorama);
// Clean up
matches.delete();
homography.delete();
return panorama;
}
matchFeatures(desc1, desc2) {
// Use brute-force matcher
const matcher = new cv.BFMatcher(cv.NORM_HAMMING, false);
const matches = new cv.DMatchVector();
matcher.match(desc1, desc2, matches);
// Filter matches by distance
const goodMatches = [];
for (let i = 0; i < matches.size(); ++i) {
const match = matches.get(i);
if (match.distance < 30) { // Threshold
goodMatches.push(match);
}
}
const goodMatchesVector = new cv.DMatchVector();
goodMatches.forEach(match => goodMatchesVector.push_back(match));
matches.delete();
matcher.delete();
return goodMatchesVector;
}
findHomography(kp1, kp2, matches) {
const srcPoints = [];
const dstPoints = [];
for (let i = 0; i < matches.size(); ++i) {
const match = matches.get(i);
const p1 = kp1.get(match.queryIdx).pt;
const p2 = kp2.get(match.trainIdx).pt;
srcPoints.push(p1.x, p1.y);
dstPoints.push(p2.x, p2.y);
}
// Convert to cv.Mat (one CV_32FC2 point per row; matFromArray needs flat numbers)
const srcMat = cv.matFromArray(matches.size(), 1, cv.CV_32FC2, srcPoints);
const dstMat = cv.matFromArray(matches.size(), 1, cv.CV_32FC2, dstPoints);
const homography = cv.findHomography(srcMat, dstMat, cv.RANSAC, 5.0);
srcMat.delete();
dstMat.delete();
return homography;
}
warpImages(img1, img2, homography) {
// Get corners of first image
const corners1 = [
[0, 0],
[img1.cols, 0],
[img1.cols, img1.rows],
[0, img1.rows]
];
// Transform corners (cv.perspectiveTransform writes into a destination Mat)
const corners2 = [];
for (const corner of corners1) {
const mat = cv.matFromArray(1, 1, cv.CV_32FC2, corner);
const transformed = new cv.Mat();
cv.perspectiveTransform(mat, transformed, homography);
corners2.push([transformed.data32F[0], transformed.data32F[1]]);
mat.delete();
transformed.delete();
}
// Calculate output size
const allCorners = [...corners1, ...corners2];
const minX = Math.min(...allCorners.map(c => c[0]));
const minY = Math.min(...allCorners.map(c => c[1]));
const maxX = Math.max(...allCorners.map(c => c[0]));
const maxY = Math.max(...allCorners.map(c => c[1]));
const outputWidth = Math.ceil(maxX - minX);
const outputHeight = Math.ceil(maxY - minY);
// Compose a translation with the homography so everything lands at positive
// coordinates; both are 3x3 CV_64F, so the product can be computed directly in JS
const h = homography.data64F;
const adjusted = [
h[0] - minX * h[6], h[1] - minX * h[7], h[2] - minX * h[8],
h[3] - minY * h[6], h[4] - minY * h[7], h[5] - minY * h[8],
h[6], h[7], h[8]
];
const adjustedHomography = cv.matFromArray(3, 3, cv.CV_64FC1, adjusted);
// Warp the first image into the panorama canvas
const panorama = new cv.Mat();
cv.warpPerspective(img1, panorama, adjustedHomography,
new cv.Size(outputWidth, outputHeight));
// Place the second image at its (translated) origin
const roi = panorama.roi(new cv.Rect(Math.round(-minX), Math.round(-minY), img2.cols, img2.rows));
img2.copyTo(roi);
// Clean up
adjustedHomography.delete();
roi.delete();
return panorama;
}
clearImages() {
this.images.forEach(img => img.delete());
this.keypoints.forEach(kp => kp.delete());
this.descriptors.forEach(desc => desc.delete());
this.images = [];
this.keypoints = [];
this.descriptors = [];
}
}
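A minimal driver for the stitcher, assuming two already-loaded <img> elements with some horizontal overlap (the element ids are assumptions):
// Hypothetical usage: stitch two overlapping photos into 'panoramaCanvas'
const stitcher = new ImageStitcher('panoramaCanvas');
stitcher.addImage(document.getElementById('leftPhoto'));
stitcher.addImage(document.getElementById('rightPhoto'));
const panorama = stitcher.stitchImages();
if (panorama) {
panorama.delete(); // stitchImages() already displayed it; free the Mat
}
stitcher.clearImages();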
// 4. 3D Object Detection (Simplified)
class Object3DDetector {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.objects = [];
this.detectorEnabled = true;
}
add3DObject(name, features) {
this.objects.push({
name,
features: features, // Would be SIFT/SURF features in practice
color: [Math.random() * 255, Math.random() * 255, Math.random() * 255, 255]
});
}
detect3DObjects() {
const processFrame = () => {
if (!this.detectorEnabled) {
requestAnimationFrame(processFrame);
return;
}
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
const gray = new cv.Mat();
this.cap.read(src);
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Simplified 3D object detection
// In practice, would use feature matching with 3D reconstruction
const detectedObjects = this.performObjectDetection(gray);
// Draw detected objects
for (const obj of detectedObjects) {
// Draw bounding box
cv.rectangle(src,
new cv.Point(obj.x, obj.y),
new cv.Point(obj.x + obj.width, obj.y + obj.height),
obj.color, 3);
// Draw label
cv.putText(src, obj.name,
new cv.Point(obj.x, obj.y - 10),
cv.FONT_HERSHEY_SIMPLEX, 0.7, obj.color, 2);
// Draw 3D axes (simplified)
this.draw3DAxes(src, obj);
}
// Draw coordinate system
this.drawCoordinateSystem(src);
cv.imshow(this.canvas, src);
src.delete();
gray.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
performObjectDetection(grayImage) {
// Simplified object detection
// Would use SURF/BRISK features and matching in practice
const detectedObjects = [];
// Use template matching as simplified approach
for (const obj of this.objects) {
// This would be proper feature matching in practice
if (Math.random() > 0.7) { // Simulate detection
detectedObjects.push({
name: obj.name,
x: Math.random() * (grayImage.cols - 100),
y: Math.random() * (grayImage.rows - 100),
width: 80 + Math.random() * 40,
height: 80 + Math.random() * 40,
color: obj.color,
pose: {
pitch: Math.random() * 360,
yaw: Math.random() * 360,
roll: Math.random() * 360
}
});
}
}
return detectedObjects;
}
draw3DAxes(image, object) {
const centerX = object.x + object.width / 2;
const centerY = object.y + object.height / 2;
const axisLength = 50;
// X-axis (red)
const xEnd = new cv.Point(
centerX + axisLength * Math.cos(object.pose.yaw * Math.PI / 180),
centerY + axisLength * Math.sin(object.pose.yaw * Math.PI / 180)
);
cv.arrowedLine(image, new cv.Point(centerX, centerY), xEnd, [0, 0, 255, 255], 3);
// Y-axis (green)
const yEnd = new cv.Point(
centerX + axisLength * Math.cos((object.pose.yaw + 90) * Math.PI / 180),
centerY + axisLength * Math.sin((object.pose.yaw + 90) * Math.PI / 180)
);
cv.arrowedLine(image, new cv.Point(centerX, centerY), yEnd, [0, 255, 0, 255], 3);
}
drawCoordinateSystem(image) {
// Draw small coordinate system in corner
const origin = new cv.Point(50, image.rows - 50);
const axisLength = 30;
// X-axis
const xEnd = new cv.Point(origin.x + axisLength, origin.y);
cv.arrowedLine(image, origin, xEnd, [255, 0, 0, 255], 2);
// Y-axis
const yEnd = new cv.Point(origin.x, origin.y - axisLength);
cv.arrowedLine(image, origin, yEnd, [0, 255, 0, 255], 2);
// Labels
cv.putText(image, 'X', new cv.Point(xEnd.x + 5, xEnd.y),
cv.FONT_HERSHEY_SIMPLEX, 0.5, [255, 0, 0, 255], 1);
cv.putText(image, 'Y', new cv.Point(yEnd.x, yEnd.y - 5),
cv.FONT_HERSHEY_SIMPLEX, 0.5, [0, 255, 0, 255], 1);
}
startDetection() {
this.detect3DObjects();
}
toggleDetection() {
this.detectorEnabled = !this.detectorEnabled;
}
}
// 5. Gesture Recognition
class GestureRecognizer {
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.handCascade = null;
this.currentGesture = 'none';
this.gestureCallback = null;
this.recognitionEnabled = true;
this.gestureHistory = [];
this.maxHistoryLength = 10;
}
async initialize() {
try {
// Load hand cascade classifier
this.handCascade = new cv.CascadeClassifier();
this.handCascade.load('palm.xml'); // Would need actual palm detection model
console.log('Gesture Recognizer initialized');
return true;
} catch (error) {
console.error('Error initializing gesture recognizer:', error);
return false;
}
}
recognizeGestures() {
const processFrame = () => {
if (!this.recognitionEnabled) {
requestAnimationFrame(processFrame);
return;
}
const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
const gray = new cv.Mat();
this.cap.read(src);
cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
// Detect hands (simplified)
const hands = this.detectHands(gray);
// Recognize gestures
const gestures = [];
for (const hand of hands) {
const gesture = this.classifyGesture(gray, hand);
gestures.push(gesture);
// Draw hand and gesture
cv.rectangle(src,
new cv.Point(hand.x, hand.y),
new cv.Point(hand.x + hand.width, hand.y + hand.height),
[255, 0, 0, 255], 2);
cv.putText(src, gesture.name,
new cv.Point(hand.x, hand.y - 10),
cv.FONT_HERSHEY_SIMPLEX, 0.7, [0, 255, 0, 255], 2);
}
// Update gesture history
if (gestures.length > 0) {
this.gestureHistory.push({
gesture: gestures[0].name,
confidence: gestures[0].confidence,
timestamp: Date.now()
});
if (this.gestureHistory.length > this.maxHistoryLength) {
this.gestureHistory.shift();
}
// Trigger callback if gesture is consistent
if (this.gestureHistory.length >= 5) {
const recentGestures = this.gestureHistory.slice(-3);
const consistent = recentGestures.every(g => g.gesture === recentGestures[0].gesture);
if (consistent && recentGestures[0].confidence > 0.7) {
this.currentGesture = recentGestures[0].gesture;
if (this.gestureCallback) {
this.gestureCallback({
gesture: this.currentGesture,
confidence: recentGestures[0].confidence
});
}
}
}
}
// Display current gesture
cv.putText(src, `Gesture: ${this.currentGesture}`,
new cv.Point(10, 30),
cv.FONT_HERSHEY_SIMPLEX, 1, [255, 255, 255, 255], 2);
cv.imshow(this.canvas, src);
src.delete();
gray.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
detectHands(grayImage) {
// Simplified hand detection
// In practice, would use skin color detection or specialized hand detector
const hands = [];
// Use adaptive threshold to find hand-like regions
const binary = new cv.Mat();
cv.adaptiveThreshold(grayImage, binary, 255, cv.ADAPTIVE_THRESH_GAUSSIAN_C, cv.THRESH_BINARY, 11, 2);
// Find contours
const contours = new cv.MatVector();
const hierarchy = new cv.Mat();
cv.findContours(binary, contours, hierarchy, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE);
// Filter for hand-like contours
for (let i = 0; i < contours.size(); ++i) {
const contour = contours.get(i);
const area = cv.contourArea(contour);
const hull = new cv.Mat();
cv.convexHull(contour, hull);
// Hand characteristics (simplified)
const hullArea = cv.contourArea(hull);
const solidity = area / hullArea;
if (area > 1000 && solidity < 0.9) {
const rect = cv.boundingRect(contour);
// Keep only the bounding box; the contour Mat itself is owned by the
// vector and freed when contours.delete() runs below
hands.push({
x: rect.x,
y: rect.y,
width: rect.width,
height: rect.height
});
}
}
hull.delete();
}
binary.delete();
contours.delete();
hierarchy.delete();
return hands;
}
classifyGesture(grayImage, hand) {
// Simplified gesture classification
// In practice, would analyze finger count, convexity defects, hand shape, etc.
const handROI = grayImage.roi(hand);
// Placeholder: random assignment for demonstration purposes
const gestures = ['fist', 'open_palm', 'peace', 'thumbs_up', 'pointing'];
const gesture = gestures[Math.floor(Math.random() * gestures.length)];
const confidence = 0.6 + Math.random() * 0.4;
handROI.delete();
// Return `name` to match what recognizeGestures() reads
return { name: gesture, confidence };
}
onGestureDetected(callback) {
this.gestureCallback = callback;
}
startRecognition() {
this.recognizeGestures();
}
toggleRecognition() {
this.recognitionEnabled = !this.recognitionEnabled;
}
getGestureHistory() {
return this.gestureHistory;
}
}
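The adaptive-threshold stand-in in detectHands() finds high-contrast blobs, not hands. The skin-color approach the comments allude to can be sketched with an HSV range (the bounds are illustrative and very lighting- and skin-tone-dependent):
// Sketch: coarse skin mask via an HSV range; feed the mask into findContours
function skinMask(srcRGBA) {
const hsv = new cv.Mat();
cv.cvtColor(srcRGBA, hsv, cv.COLOR_RGBA2RGB);
cv.cvtColor(hsv, hsv, cv.COLOR_RGB2HSV);
const low = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [0, 40, 60, 0]);
const high = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [25, 180, 255, 255]);
const mask = new cv.Mat();
cv.inRange(hsv, low, high, mask);
hsv.delete();
low.delete();
high.delete();
return mask; // caller must mask.delete()
}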
// 6. Initialize all advanced applications
function initializeAdvancedApps() {
console.log('Initializing Advanced OpenCV Applications...');
// Wait for OpenCV (the WASM runtime must be up before cv is usable)
if (!window.cv || !window.cv.Mat) {
setTimeout(initializeAdvancedApps, 100);
return;
}
cv = window.cv;
// Initialize AR Face Filters
const arFilters = new ARFaceFilters('videoInput', 'arCanvas');
arFilters.initialize().then(() => {
arFilters.start();
});
// Set up AR filter controls
document.querySelectorAll('.ar-filter-btn').forEach(btn => {
btn.addEventListener('click', () => {
arFilters.setFilter(btn.dataset.filter);
});
});
// Initialize Barcode Scanner
const barcodeScanner = new BarcodeScanner('videoInput', 'barcodeCanvas');
barcodeScanner.setOnCodeDetected((code) => {
console.log('Barcode detected:', code);
document.getElementById('barcodeResult').textContent = `Detected: ${code.data}`;
});
barcodeScanner.startScanning();
// Initialize Image Stitcher
const imageStitcher = new ImageStitcher('panoramaCanvas');
document.getElementById('addImage1Btn').addEventListener('click', () => {
const input = document.createElement('input');
input.type = 'file';
input.accept = 'image/*';
input.onchange = (e) => {
const file = e.target.files[0];
const reader = new FileReader();
reader.onload = (event) => {
const img = document.createElement('img');
img.onload = () => {
imageStitcher.addImage(img);
};
img.src = event.target.result;
};
reader.readAsDataURL(file);
};
input.click();
});
document.getElementById('stitchBtn').addEventListener('click', () => {
imageStitcher.stitchImages();
});
// Initialize 3D Object Detector
const detector3D = new Object3DDetector('videoInput', '3dCanvas');
detector3D.add3DObject('Cube', { /* features */ });
detector3D.add3DObject('Sphere', { /* features */ });
detector3D.startDetection();
// Initialize Gesture Recognizer
const gestureRecognizer = new GestureRecognizer('videoInput', 'gestureCanvas');
gestureRecognizer.initialize().then(() => {
gestureRecognizer.onGestureDetected((gesture) => {
console.log('Gesture detected:', gesture);
document.getElementById('gestureResult').textContent =
`Gesture: ${gesture.gesture} (Confidence: ${(gesture.confidence * 100).toFixed(1)}%)`;
});
gestureRecognizer.startRecognition();
});
console.log('Advanced applications initialized');
}
// Export classes for external use
if (typeof module !== 'undefined' && module.exports) {
module.exports = {
ARFaceFilters,
BarcodeScanner,
ImageStitcher,
Object3DDetector,
GestureRecognizer
};
}
// Auto-initialize
document.addEventListener('DOMContentLoaded', () => {
initializeAdvancedApps();
});
console.log('Advanced OpenCV.js Applications module loaded');