Exemples de Vision par Ordinateur OpenCV
Exemples OpenCV.js pour le traitement d'image, la vision par ordinateur et l'analyse vidéo en temps réel en JavaScript
Key Facts
- Category
- Computer Vision
- Items
- 3
- Format Families
- image, video
Sample Overview
Exemples OpenCV.js pour le traitement d'image, la vision par ordinateur et l'analyse vidéo en temps réel en JavaScript. This sample set belongs to Computer Vision and can be used to test related workflows inside Elysia Tools.
💻 Opérations de Base OpenCV.js javascript
Premiers pas avec OpenCV.js pour le traitement d'image et les tâches de base de vision par ordinateur
// OpenCV.js Basic Operations
// 1. Load OpenCV.js
// In HTML: <script async src="https://docs.opencv.org/4.5.0/opencv.js" onload="onOpenCvReady();"></script>
// Module-level handle to the OpenCV.js namespace; assigned once the WASM runtime is ready.
let cv;
// Invoked by the <script> tag's onload attribute once opencv.js has finished loading.
// Grabs the global cv namespace and kicks off the demo pipeline.
function onOpenCvReady() {
cv = window.cv;
console.log('OpenCV.js is ready');
runBasicExamples();
}
// 2. Load and Display Image
// Reads the given <img> element into a cv.Mat, renders it onto the target
// canvas, and frees the Mat (OpenCV.js memory is managed manually).
// FIX: removed the unused `canvasElement` lookup — cv.imshow accepts the
// canvas id directly.
function loadImageAndDisplay(imageElementId, canvasId) {
  const imgElement = document.getElementById(imageElementId);
  const src = cv.imread(imgElement);
  cv.imshow(canvasId, src);
  src.delete();
}
// 3. Basic Image Operations
// Demonstrates grayscale conversion, Gaussian blur and Canny edge detection
// on the #inputImage element, rendering each stage onto its own canvas.
function basicImageOperations() {
  console.log('=== Basic Image Operations ===');
  const src = cv.imread(document.getElementById('inputImage'));

  // Grayscale conversion.
  const gray = new cv.Mat();
  cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
  cv.imshow('grayCanvas', gray);

  // Gaussian blur of the grayscale image (5x5 kernel, sigma derived from it).
  const blurred = new cv.Mat();
  cv.GaussianBlur(gray, blurred, new cv.Size(5, 5), 0, 0, cv.BORDER_DEFAULT);
  cv.imshow('blurCanvas', blurred);

  // Canny edge detection on the (unblurred) grayscale image.
  const edges = new cv.Mat();
  cv.Canny(gray, edges, 50, 150, 3, false);
  cv.imshow('edgesCanvas', edges);

  // Release all native Mats — OpenCV.js memory is manual.
  [src, gray, blurred, edges].forEach((m) => m.delete());
}
// 4. Image Filtering and Enhancement
// Histogram equalization, brightness/contrast adjustment and a sepia effect
// applied to #inputImage.
//
// FIX: cv.transform requires kernel.cols === src.channels(). cv.imread yields
// a 4-channel RGBA Mat, so the original call with a 3x3 kernel threw; the
// image is now converted to RGB first. The kernel rows were also reordered so
// the first output row produces R' (the original rows were in BGR order).
function imageFiltering() {
  console.log('\n=== Image Filtering ===');
  const src = cv.imread(document.getElementById('inputImage'));

  // Histogram equalization works on single-channel images.
  const gray = new cv.Mat();
  cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);
  const equalized = new cv.Mat();
  cv.equalizeHist(gray, equalized);
  cv.imshow('equalizedCanvas', equalized);

  // Linear brightness/contrast: dst = src * alpha + beta.
  const adjusted = new cv.Mat();
  src.convertTo(adjusted, -1, 1.2, 30); // alpha=1.2 (contrast), beta=30 (brightness)
  cv.imshow('adjustedCanvas', adjusted);

  // Sepia tone: drop alpha, then mix RGB channels with the classic matrix.
  const rgb = new cv.Mat();
  cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB, 0);
  const sepia = new cv.Mat();
  // Rows produce output channels in RGB order: R', G', B'.
  const kernel = cv.matFromArray(3, 3, cv.CV_32FC1, [
    0.393, 0.769, 0.189,
    0.349, 0.686, 0.168,
    0.272, 0.534, 0.131
  ]);
  cv.transform(rgb, sepia, kernel);
  cv.imshow('sepiaCanvas', sepia);

  // Clean up native resources.
  [src, gray, equalized, adjusted, rgb, sepia, kernel].forEach((m) => m.delete());
}
// 5. Geometric Transformations
// Rotation, scaling, translation and mirroring of #inputImage, each rendered
// to its own canvas.
function geometricTransformations() {
  console.log('\n=== Geometric Transformations ===');
  const src = cv.imread(document.getElementById('inputImage'));

  // Rotate 45 degrees about the image centre (scale factor 1).
  const rotated = new cv.Mat();
  const centre = new cv.Point(src.cols / 2, src.rows / 2);
  const rotMat = cv.getRotationMatrix2D(centre, 45, 1);
  cv.warpAffine(src, rotated, rotMat, src.size(), cv.INTER_LINEAR, cv.BORDER_CONSTANT, new cv.Scalar());
  cv.imshow('rotatedCanvas', rotated);

  // Downscale to half size; INTER_AREA is the preferred mode for shrinking.
  const scaled = new cv.Mat();
  const half = 0.5;
  cv.resize(src, scaled, new cv.Size(src.cols * half, src.rows * half), 0, 0, cv.INTER_AREA);
  cv.imshow('scaledCanvas', scaled);

  // Shift 50px right and 30px down via a 2x3 affine matrix.
  const translated = new cv.Mat();
  const transMat = cv.matFromArray(2, 3, cv.CV_64FC1, [
    1, 0, 50,
    0, 1, 30
  ]);
  cv.warpAffine(src, translated, transMat, src.size());
  cv.imshow('translatedCanvas', translated);

  // Mirror horizontally (flip code: 0 = vertical, 1 = horizontal, -1 = both).
  const flipped = new cv.Mat();
  cv.flip(src, flipped, 1);
  cv.imshow('flippedCanvas', flipped);

  [src, rotated, scaled, translated, flipped, rotMat, transMat].forEach((m) => m.delete());
}
// 6. Morphological Operations
// Thresholds #inputImage to a binary mask and shows erosion, dilation,
// opening and closing with a 3x3 rectangular structuring element.
function morphologicalOperations() {
  console.log('\n=== Morphological Operations ===');
  const src = cv.imread(document.getElementById('inputImage'));
  const gray = new cv.Mat();
  cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);

  // Fixed binary threshold at 127.
  const binary = new cv.Mat();
  cv.threshold(gray, binary, 127, 255, cv.THRESH_BINARY);

  const element = cv.getStructuringElement(cv.MORPH_RECT, new cv.Size(3, 3));

  // Erosion shrinks white regions.
  const eroded = new cv.Mat();
  cv.erode(binary, eroded, element);
  cv.imshow('erodedCanvas', eroded);

  // Dilation grows white regions.
  const dilated = new cv.Mat();
  cv.dilate(binary, dilated, element);
  cv.imshow('dilatedCanvas', dilated);

  // Opening removes small white specks (erode then dilate).
  const opened = new cv.Mat();
  cv.morphologyEx(binary, opened, cv.MORPH_OPEN, element);
  cv.imshow('openedCanvas', opened);

  // Closing fills small black holes (dilate then erode).
  const closed = new cv.Mat();
  cv.morphologyEx(binary, closed, cv.MORPH_CLOSE, element);
  cv.imshow('closedCanvas', closed);

  [src, gray, binary, eroded, dilated, opened, closed, element].forEach((m) => m.delete());
}
// 7. Face Detection
// Runs a Haar-cascade face detector over #inputImage and draws red boxes
// around each detection.
// FIX: removed the unused `msize` local from the original.
// NOTE(review): CascadeClassifier.load() reads from the Emscripten virtual
// filesystem, so 'haarcascade_frontalface_default.xml' presumably has to be
// written there first (e.g. via cv.FS_createDataFile) — confirm in the
// hosting page.
function faceDetection() {
  console.log('\n=== Face Detection ===');
  const src = cv.imread(document.getElementById('inputImage'));
  const gray = new cv.Mat();
  cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);

  const faceCascade = new cv.CascadeClassifier();
  faceCascade.load('haarcascade_frontalface_default.xml');

  // scaleFactor 1.1, minNeighbors 3, no flags.
  const faces = new cv.RectVector();
  faceCascade.detectMultiScale(gray, faces, 1.1, 3, 0);

  // Outline each detected face in red.
  for (let i = 0; i < faces.size(); ++i) {
    const face = faces.get(i);
    cv.rectangle(src,
      new cv.Point(face.x, face.y),
      new cv.Point(face.x + face.width, face.y + face.height),
      [255, 0, 0, 255], 3);
  }
  cv.imshow('faceDetectionCanvas', src);
  console.log(`Detected ${faces.size()} faces`);

  src.delete();
  gray.delete();
  faces.delete();
  faceCascade.delete();
}
// 8. Object Detection with Template Matching
// Locates #templateImage inside #mainImage using normalized cross-correlation
// and draws a red rectangle at the best match position.
function templateMatching() {
  console.log('\n=== Template Matching ===');
  const scene = cv.imread(document.getElementById('mainImage'));
  const template = cv.imread(document.getElementById('templateImage'));

  // Matching is performed on grayscale versions of both images.
  const sceneGray = new cv.Mat();
  const templateGray = new cv.Mat();
  cv.cvtColor(scene, sceneGray, cv.COLOR_RGBA2GRAY, 0);
  cv.cvtColor(template, templateGray, cv.COLOR_RGBA2GRAY, 0);

  const scores = new cv.Mat();
  cv.matchTemplate(sceneGray, templateGray, scores, cv.TM_CCOEFF_NORMED);

  // For TM_CCOEFF_NORMED the best match is the global maximum of the score map.
  const { maxLoc, maxVal } = cv.minMaxLoc(scores);
  const topLeft = maxLoc;
  const bottomRight = new cv.Point(maxLoc.x + template.cols, maxLoc.y + template.rows);
  cv.rectangle(scene, topLeft, bottomRight, new cv.Scalar(255, 0, 0, 255), 2, cv.LINE_8, 0);
  cv.imshow('templateMatchCanvas', scene);
  console.log(`Template match found at (${maxLoc.x}, ${maxLoc.y}) with confidence ${maxVal.toFixed(4)}`);

  [scene, template, sceneGray, templateGray, scores].forEach((m) => m.delete());
}
// 9. Color Detection and Segmentation
// Segments blue regions of #inputImage in HSV space, shows the binary mask
// and the masked image, and outlines each blue blob with a random colour.
function colorDetection() {
  console.log('\n=== Color Detection ===');
  const src = cv.imread(document.getElementById('inputImage'));

  // RGBA -> RGB -> HSV (the inRange bounds below are expressed in HSV).
  const hsv = new cv.Mat();
  cv.cvtColor(src, hsv, cv.COLOR_RGBA2RGB);
  cv.cvtColor(hsv, hsv, cv.COLOR_RGB2HSV);

  // Hue band for blue; tune the bounds to target other colours.
  const lowerBound = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [100, 50, 50, 0]);
  const upperBound = new cv.Mat(hsv.rows, hsv.cols, hsv.type(), [130, 255, 255, 255]);
  const mask = new cv.Mat();
  cv.inRange(hsv, lowerBound, upperBound, mask);

  // Keep only the pixels selected by the mask.
  const masked = new cv.Mat();
  src.copyTo(masked, mask);
  cv.imshow('colorDetectionCanvas', masked);
  cv.imshow('colorMaskCanvas', mask);

  // Outline each connected blue region with a random colour.
  const contours = new cv.MatVector();
  const hierarchy = new cv.Mat();
  cv.findContours(mask, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
  const outlined = src.clone();
  for (let i = 0; i < contours.size(); ++i) {
    const randomColor = new cv.Scalar(Math.random() * 255, Math.random() * 255, Math.random() * 255, 255);
    cv.drawContours(outlined, contours, i, randomColor, 2, cv.LINE_8, hierarchy, 100);
  }
  cv.imshow('contoursCanvas', outlined);
  console.log(`Found ${contours.size()} blue objects`);

  [src, hsv, lowerBound, upperBound, mask, masked, hierarchy, outlined].forEach((m) => m.delete());
  contours.delete();
}
// 10. Image Histogram Analysis
// Computes and draws the grayscale intensity histogram of #inputImage and
// logs per-channel mean / standard-deviation statistics.
//
// Fixes vs. the original sketch:
//  - cv.calcHist in OpenCV.js takes a cv.MatVector source, a single channel
//    index, and writes into a cv.Mat (the original passed a plain Mat source,
//    a multi-channel list and a MatVector output).
//  - Histogram bins are CV_32F and read via hist.data32F, not MatVector.get.
//  - cv.Mat.zeros is a static factory, not a constructor.
//  - meanStdDev outputs are CV_64F Nx1 Mats, so values are read with doubleAt.
function histogramAnalysis() {
  console.log('\n=== Histogram Analysis ===');
  const src = cv.imread(document.getElementById('inputImage'));

  // calcHist works on single-channel input, so convert to grayscale first.
  const gray = new cv.Mat();
  cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY, 0);

  const srcVec = new cv.MatVector();
  srcVec.push_back(gray);
  const hist = new cv.Mat();
  const mask = new cv.Mat(); // empty mask = use every pixel
  cv.calcHist(srcVec, [0], mask, hist, [256], [0, 255]);

  // Render the histogram as a polyline on a 512x400 image.
  const histW = 512;
  const histH = 400;
  const histImage = cv.Mat.zeros(histH, histW, cv.CV_8UC3);
  cv.normalize(hist, hist, 0, histImage.rows, cv.NORM_MINMAX);
  const binW = Math.round(histW / 256);
  for (let i = 1; i < 256; i++) {
    const prev = Math.round(hist.data32F[i - 1]);
    const curr = Math.round(hist.data32F[i]);
    cv.line(histImage,
      new cv.Point((i - 1) * binW, histH - prev),
      new cv.Point(i * binW, histH - curr),
      new cv.Scalar(255, 0, 0, 255), 2, cv.LINE_8, 0);
  }
  cv.imshow('histogramCanvas', histImage);

  // Per-channel statistics on the original RGBA image.
  const mean = cv.mean(src);
  const meanMat = new cv.Mat();
  const stddev = new cv.Mat();
  cv.meanStdDev(src, meanMat, stddev);
  console.log('Image Statistics:');
  console.log(`Mean (R,G,B,A): (${mean[0].toFixed(2)}, ${mean[1].toFixed(2)}, ${mean[2].toFixed(2)}, ${mean[3].toFixed(2)})`);
  console.log(`Std Dev: (${stddev.doubleAt(0, 0).toFixed(2)}, ${stddev.doubleAt(1, 0).toFixed(2)}, ${stddev.doubleAt(2, 0).toFixed(2)})`);

  [src, gray, srcVec, hist, mask, histImage, meanMat, stddev].forEach((m) => m.delete());
}
// 11. Utility Functions
// Static helpers for file/canvas interop and lightweight performance logging.
class OpenCVUtils {
  // Reads a File (e.g. from <input type="file">) into an <img> element and
  // invokes `callback(imgElement)` once the image has decoded.
  static loadImageFromFile(file, callback) {
    const reader = new FileReader();
    reader.onload = (e) => {
      const img = document.createElement('img');
      img.onload = () => callback(img);
      img.src = e.target.result;
    };
    reader.readAsDataURL(file);
  }

  // Triggers a browser download of the canvas contents (as a data URL).
  static downloadCanvas(canvasId, filename) {
    const canvas = document.getElementById(canvasId);
    const anchor = document.createElement('a');
    anchor.download = filename;
    anchor.href = canvas.toDataURL();
    anchor.click();
  }

  // Returns the raw ImageData of the whole canvas.
  static getCanvasImageData(canvasId) {
    const canvas = document.getElementById(canvasId);
    return canvas.getContext('2d').getImageData(0, 0, canvas.width, canvas.height);
  }

  // Formats the elapsed time since `start` (a performance.now() timestamp)
  // as a string like "12.34ms".
  static performanceTimer(start) {
    return `${(performance.now() - start).toFixed(2)}ms`;
  }

  // Logs current JS heap usage (performance.memory is a Chrome-only extension).
  static logMemoryUsage() {
    if (performance.memory) {
      const toMB = (bytes) => `${(bytes / 1048576).toFixed(2)} MB`;
      console.log('Memory Usage:', {
        used: toMB(performance.memory.usedJSHeapSize),
        total: toMB(performance.memory.totalJSHeapSize),
        limit: toMB(performance.memory.jsHeapSizeLimit)
      });
    }
  }
}
// Main function to run all examples
// Entry point invoked from onOpenCvReady(); defers briefly so the <img>
// elements referenced by the demos have a chance to finish loading.
function runBasicExamples() {
  console.log('Starting OpenCV.js Basic Examples...');
  setTimeout(() => {
    const startTime = performance.now();
    const demos = [
      basicImageOperations,
      imageFiltering,
      geometricTransformations,
      morphologicalOperations,
      colorDetection,
      histogramAnalysis
    ];
    try {
      demos.forEach((demo) => demo());
      console.log('\nAll basic examples completed successfully!');
      console.log('Total execution time:', OpenCVUtils.performanceTimer(startTime));
      OpenCVUtils.logMemoryUsage();
    } catch (error) {
      console.error('Error running examples:', error);
    }
  }, 1000);
}
// Export functions for external use
// CommonJS export guard: only active when the file is loaded under Node or a
// bundler (in a browser <script> tag, `module` is undefined and this is skipped).
if (typeof module !== 'undefined' && module.exports) {
module.exports = {
loadImageAndDisplay,
basicImageOperations,
imageFiltering,
geometricTransformations,
morphologicalOperations,
faceDetection,
templateMatching,
colorDetection,
histogramAnalysis,
OpenCVUtils
};
}
console.log('OpenCV.js Basic Samples module loaded');
console.log('Make sure to include OpenCV.js in your HTML: <script async src="https://docs.opencv.org/4.5.0/opencv.js"></script>');
💻 Traitement Vidéo avec OpenCV.js javascript
Traitement vidéo en temps réel, suivi d'objets et détection de mouvement utilisant WebRTC et OpenCV.js
// Video Processing with OpenCV.js
// Module-level state shared by the demo classes below.
// NOTE(review): `video`, `canvas`, `canvasContext` and `processingEnabled`
// are declared here but never assigned or read in this file — presumably
// reserved for external wiring; confirm before removing.
let cv;
let video;
let canvas;
let canvasContext;
// Set true once the camera stream is live and frames can be read.
let streaming = false;
let processingEnabled = true;
// 1. Initialize Video Capture
// Captures webcam frames, applies the currently selected filter and renders
// the result to a canvas inside a requestAnimationFrame loop.
class VideoProcessor {
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.ctx = this.canvas.getContext('2d');
    this.cap = new cv.VideoCapture(this.video);
    this.processingEnabled = true;
    this.currentFilter = 'normal';
  }

  // Start video stream (640x480 camera, no audio); processing begins once
  // the first frame data is available.
  async startVideo() {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: { width: 640, height: 480 },
        audio: false
      });
      this.video.srcObject = stream;
      this.video.play();
      this.video.addEventListener('loadeddata', () => {
        this.canvas.width = this.video.videoWidth;
        this.canvas.height = this.video.videoHeight;
        streaming = true;
        this.processVideo();
      });
      console.log('Video stream started successfully');
    } catch (error) {
      console.error('Error accessing camera:', error);
    }
  }

  // Stop video stream and release the camera tracks.
  stopVideo() {
    if (this.video.srcObject) {
      this.video.srcObject.getTracks().forEach((track) => track.stop());
      this.video.srcObject = null;
      streaming = false;
    }
  }

  // Main processing loop: grab a frame, apply the active filter, display it.
  processVideo() {
    if (!streaming || !this.processingEnabled) {
      requestAnimationFrame(() => this.processVideo());
      return;
    }
    const src = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
    this.cap.read(src);

    // Each filter helper returns a freshly allocated Mat; 'normal' reuses src.
    // BUG FIX: the original initialized dst with src.clone() and then
    // overwrote the reference in every switch branch, leaking one Mat per frame.
    let dst;
    switch (this.currentFilter) {
      case 'grayscale':
        dst = this.applyGrayscale(src);
        break;
      case 'edge':
        dst = this.applyEdgeDetection(src);
        break;
      case 'blur':
        dst = this.applyBlur(src);
        break;
      case 'cartoon':
        dst = this.applyCartoonEffect(src);
        break;
      case 'motion':
        dst = this.detectMotion(src);
        break;
      default:
        dst = src;
    }

    cv.imshow(this.canvas, dst);
    if (dst !== src) dst.delete();
    src.delete();
    requestAnimationFrame(() => this.processVideo());
  }

  // Grayscale filter (round-trips back to RGBA so imshow sees 4 channels).
  applyGrayscale(src) {
    const gray = new cv.Mat();
    cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
    const dst = new cv.Mat();
    cv.cvtColor(gray, dst, cv.COLOR_GRAY2RGBA);
    gray.delete();
    return dst;
  }

  // Canny edge filter rendered as an RGBA image.
  applyEdgeDetection(src) {
    const gray = new cv.Mat();
    cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);
    const edges = new cv.Mat();
    cv.Canny(gray, edges, 50, 150);
    const dst = new cv.Mat();
    cv.cvtColor(edges, dst, cv.COLOR_GRAY2RGBA);
    gray.delete();
    edges.delete();
    return dst;
  }

  // Heavy 15x15 Gaussian blur.
  applyBlur(src) {
    const dst = new cv.Mat();
    cv.GaussianBlur(src, dst, new cv.Size(15, 15), 0, 0, cv.BORDER_DEFAULT);
    return dst;
  }

  // Cartoon effect: repeated bilateral filtering for flat colour regions,
  // masked by an adaptive-threshold edge map.
  // BUG FIX: cv.bilateralFilter only accepts 1- or 3-channel input, so the
  // RGBA frame is converted to RGB first (the original passed RGBA directly,
  // and also allocated `gray` and `edges` Mats it never used).
  applyCartoonEffect(src) {
    const rgb = new cv.Mat();
    cv.cvtColor(src, rgb, cv.COLOR_RGBA2RGB);
    const filtered = new cv.Mat();
    cv.bilateralFilter(rgb, filtered, 15, 80, 80, cv.BORDER_DEFAULT);
    for (let i = 0; i < 2; i++) {
      const temp = filtered.clone();
      cv.bilateralFilter(temp, filtered, 15, 80, 80, cv.BORDER_DEFAULT);
      temp.delete();
    }
    const gray = new cv.Mat();
    cv.cvtColor(filtered, gray, cv.COLOR_RGB2GRAY);
    const edgeMask = new cv.Mat();
    cv.adaptiveThreshold(gray, edgeMask, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY, 9, 2);
    const dst = new cv.Mat();
    cv.bitwise_and(filtered, filtered, dst, edgeMask);
    rgb.delete();
    filtered.delete();
    gray.delete();
    edgeMask.delete();
    return dst;
  }

  // Motion highlight: frame-difference against the previous grayscale frame
  // and box any contour larger than 500 px^2.
  detectMotion(src) {
    if (!this.previousFrame) {
      // First frame: just remember it, nothing to diff against yet.
      this.previousFrame = new cv.Mat();
      cv.cvtColor(src, this.previousFrame, cv.COLOR_RGBA2GRAY);
      return src;
    }
    const gray = new cv.Mat();
    cv.cvtColor(src, gray, cv.COLOR_RGBA2GRAY);

    const diff = new cv.Mat();
    cv.absdiff(gray, this.previousFrame, diff);
    const thresh = new cv.Mat();
    cv.threshold(diff, thresh, 25, 255, cv.THRESH_BINARY);

    const contours = new cv.MatVector();
    const hierarchy = new cv.Mat();
    cv.findContours(thresh, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);

    const dst = src.clone();
    for (let i = 0; i < contours.size(); ++i) {
      const contour = contours.get(i);
      if (cv.contourArea(contour) > 500) { // filter small movements
        const rect = cv.boundingRect(contour);
        cv.rectangle(dst,
          new cv.Point(rect.x, rect.y),
          new cv.Point(rect.x + rect.width, rect.y + rect.height),
          [0, 255, 0, 255], 2);
      }
    }

    // The current gray frame becomes the next reference (ownership transfer
    // instead of the original clone+delete pair).
    this.previousFrame.delete();
    this.previousFrame = gray;

    diff.delete();
    thresh.delete();
    contours.delete();
    hierarchy.delete();
    return dst;
  }

  // Select the active filter by name ('normal', 'grayscale', 'edge', ...).
  setFilter(filterName) {
    this.currentFilter = filterName;
    console.log('Filter changed to:', filterName);
  }

  // Pause/resume frame processing (the rAF loop keeps spinning).
  toggleProcessing() {
    this.processingEnabled = !this.processingEnabled;
    console.log('Processing enabled:', this.processingEnabled);
  }

  // Download the current canvas contents as a PNG snapshot.
  takeSnapshot() {
    const dataURL = this.canvas.toDataURL('image/png');
    const link = document.createElement('a');
    link.download = `snapshot_${Date.now()}.png`;
    link.href = dataURL;
    link.click();
    console.log('Snapshot saved');
  }
}
// 2. Face Detection in Video
// Detects faces (and eyes within each face) on every video frame using Haar
// cascades, drawing boxes and updating the #faceCount element.
class FaceDetectionVideo {
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.cap = new cv.VideoCapture(this.video);
    this.faceCascade = null;
    this.eyeCascade = null;
    this.detectionEnabled = true;
  }

  // Load the face and eye cascade classifiers; resolves to true on success.
  async initialize() {
    try {
      this.faceCascade = new cv.CascadeClassifier();
      this.faceCascade.load('haarcascade_frontalface_default.xml');
      this.eyeCascade = new cv.CascadeClassifier();
      this.eyeCascade.load('haarcascade_eye.xml');
      console.log('Face detection models loaded');
      return true;
    } catch (error) {
      console.error('Error loading face detection models:', error);
      return false;
    }
  }

  // Per-frame detection loop driven by requestAnimationFrame.
  startDetection() {
    const handleFrame = () => {
      if (!this.detectionEnabled) {
        requestAnimationFrame(handleFrame);
        return;
      }
      const frame = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
      const grayFrame = new cv.Mat();
      this.cap.read(frame);
      cv.cvtColor(frame, grayFrame, cv.COLOR_RGBA2GRAY, 0);

      const faces = new cv.RectVector();
      this.faceCascade.detectMultiScale(grayFrame, faces, 1.1, 3, 0);

      for (let i = 0; i < faces.size(); ++i) {
        const face = faces.get(i);
        // Red box around the face.
        cv.rectangle(frame,
          new cv.Point(face.x, face.y),
          new cv.Point(face.x + face.width, face.y + face.height),
          [255, 0, 0, 255], 3);

        // Search for eyes only inside the face region of interest.
        const faceROI = grayFrame.roi(face);
        const eyes = new cv.RectVector();
        this.eyeCascade.detectMultiScale(faceROI, eyes, 1.1, 10, 0);
        for (let j = 0; j < eyes.size(); ++j) {
          const eye = eyes.get(j);
          // Eye coordinates are relative to the ROI; offset by the face origin.
          cv.rectangle(frame,
            new cv.Point(face.x + eye.x, face.y + eye.y),
            new cv.Point(face.x + eye.x + eye.width, face.y + eye.y + eye.height),
            [0, 255, 255, 255], 2);
        }
        faceROI.delete();
        eyes.delete();
      }

      cv.imshow(this.canvas, frame);
      document.getElementById('faceCount').textContent = faces.size();

      frame.delete();
      grayFrame.delete();
      faces.delete();
      requestAnimationFrame(handleFrame);
    };
    handleFrame();
  }

  // Pause/resume detection without tearing the loop down.
  toggleDetection() {
    this.detectionEnabled = !this.detectionEnabled;
    console.log('Face detection enabled:', this.detectionEnabled);
  }
}
// 3. Object Tracking
// NOTE(review): this class sketches single-object tracking, but several of
// the APIs it calls are not part of the stock opencv.js build:
//  - cv.VideoCapture.read() takes an output cv.Mat and returns nothing, so
//    the no-argument read() calls below presumably yield undefined — confirm
//    against the OpenCV.js VideoCapture wrapper actually in use.
//  - cv.selectROI and the Tracker* classes come from opencv_contrib and are
//    typically absent from the prebuilt opencv.js bundle — a custom build
//    appears to be required.
class ObjectTracker {
// Caches DOM handles and a VideoCapture; tracking starts only after
// initializeTracker() and selectROI() succeed.
constructor(videoId, canvasId) {
this.video = document.getElementById(videoId);
this.canvas = document.getElementById(canvasId);
this.cap = new cv.VideoCapture(this.video);
this.tracker = null;
this.tracking = false;
this.roi = null;
}
// Validates the requested tracker name against the known OpenCV tracker
// types. Returns false for an unknown name or a construction failure.
initializeTracker(trackerType = 'KCF') {
const trackerTypes = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT'];
const trackerTypeIndex = trackerTypes.indexOf(trackerType);
if (trackerTypeIndex === -1) {
console.error('Invalid tracker type');
return false;
}
try {
// NOTE(review): despite the trackerType argument, a CSRT tracker is
// always constructed here.
this.tracker = new cv.TrackerCSRT_create(); // CSRT is more reliable
console.log(`${trackerType} tracker initialized`);
return true;
} catch (error) {
console.error('Error initializing tracker:', error);
return false;
}
}
// Lets the user draw a region of interest; starts tracking when the
// selection is non-empty.
selectROI(event) {
if (!this.tracker) {
alert('Please initialize tracker first');
return;
}
const rect = cv.selectROI('ObjectTracking', this.video, false, false);
if (rect.width > 0 && rect.height > 0) {
this.roi = rect;
this.startTracking();
}
}
// Seeds the tracker with the selected ROI and the current frame, then
// enters the per-frame update loop.
startTracking() {
if (!this.tracker || !this.roi) return;
// Initialize tracker with ROI
this.tracker.init(this.roi, this.cap.read());
this.tracking = true;
this.track();
}
// Per-frame update loop: draws the tracked box in green, or a red failure
// banner when the tracker loses the target.
track() {
if (!this.tracking) return;
const processFrame = () => {
if (!this.tracking) return;
const frame = this.cap.read();
// NOTE(review): `frame.empty` references the method without calling it,
// which is always truthy for a valid Mat — likely meant frame.empty().
if (frame.empty) {
console.log('Video ended');
return;
}
// Update tracker
const ok = this.tracker.update(frame);
if (ok) {
// Draw tracked object
const trackedRect = this.tracker.getRegion();
cv.rectangle(frame,
new cv.Point(trackedRect.x, trackedRect.y),
new cv.Point(trackedRect.x + trackedRect.width, trackedRect.y + trackedRect.height),
[0, 255, 0, 255], 3);
} else {
cv.putText(frame, 'Tracking failure detected', new cv.Point(100, 80),
cv.FONT_HERSHEY_SIMPLEX, 0.75, [0, 0, 255, 255], 2);
}
cv.imshow(this.canvas, frame);
frame.delete();
requestAnimationFrame(processFrame);
};
processFrame();
}
// Halts the loop; the next scheduled callback exits immediately.
stopTracking() {
this.tracking = false;
console.log('Tracking stopped');
}
}
// 4. Motion Detection and Alert System
// Frame-differencing motion detector: each grayscale frame is compared with
// the previous one; moving regions are outlined, a short motion history is
// kept, and an alert callback fires when large motion is seen.
class MotionDetector {
  constructor(videoId, canvasId) {
    this.video = document.getElementById(videoId);
    this.canvas = document.getElementById(canvasId);
    this.cap = new cv.VideoCapture(this.video);
    this.previousFrame = null;   // blurred grayscale copy of the last frame
    this.motionThreshold = 25;   // per-pixel difference threshold (0-255)
    this.minContourArea = 500;   // ignore moving blobs smaller than this
    this.alertCallback = null;
    this.detectionEnabled = true;
    this.motionHistory = [];     // recent {detected, area, timestamp} samples
    this.maxHistoryLength = 10;
  }

  // Register a callback invoked with {detected, area, timestamp} when
  // significant motion (total area > 5000) is detected.
  setAlertCallback(callback) {
    this.alertCallback = callback;
  }

  // Start the per-frame detection loop (requestAnimationFrame driven).
  startDetection() {
    const processFrame = () => {
      if (!this.detectionEnabled) {
        requestAnimationFrame(processFrame);
        return;
      }
      // BUG FIX: cv.VideoCapture.read() fills a caller-allocated RGBA Mat;
      // the original called read() with no argument and then tested the
      // method reference `frame.empty` (always truthy) instead of calling it.
      const frame = new cv.Mat(this.video.height, this.video.width, cv.CV_8UC4);
      this.cap.read(frame);
      if (frame.empty()) {
        frame.delete();
        requestAnimationFrame(processFrame);
        return;
      }

      // Blur the grayscale frame to suppress sensor noise before differencing.
      const gray = new cv.Mat();
      cv.cvtColor(frame, gray, cv.COLOR_RGBA2GRAY, 0);
      cv.GaussianBlur(gray, gray, new cv.Size(21, 21), 0);

      let motionDetected = false;
      const output = frame.clone();

      if (this.previousFrame) {
        // Absolute difference against the previous frame, thresholded to a
        // binary motion mask, then dilated to merge nearby fragments.
        const diff = new cv.Mat();
        cv.absdiff(gray, this.previousFrame, diff);
        const thresh = new cv.Mat();
        cv.threshold(diff, thresh, this.motionThreshold, 255, cv.THRESH_BINARY);
        const dilated = new cv.Mat();
        const element = cv.getStructuringElement(cv.MORPH_RECT, new cv.Size(7, 7));
        cv.dilate(thresh, dilated, element);

        const contours = new cv.MatVector();
        const hierarchy = new cv.Mat();
        cv.findContours(dilated, contours, hierarchy, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);

        let totalMotionArea = 0;
        for (let i = 0; i < contours.size(); ++i) {
          const contour = contours.get(i);
          const area = cv.contourArea(contour);
          if (area > this.minContourArea) {
            motionDetected = true;
            totalMotionArea += area;
            // Box each sufficiently large moving region in green.
            const rect = cv.boundingRect(contour);
            cv.rectangle(output,
              new cv.Point(rect.x, rect.y),
              new cv.Point(rect.x + rect.width, rect.y + rect.height),
              [0, 255, 0, 255], 2);
          }
        }

        // Ring buffer of the most recent motion samples.
        this.motionHistory.push({
          detected: motionDetected,
          area: totalMotionArea,
          timestamp: Date.now()
        });
        if (this.motionHistory.length > this.maxHistoryLength) {
          this.motionHistory.shift();
        }

        // Alert on large aggregate motion.
        if (motionDetected && totalMotionArea > 5000) {
          if (this.alertCallback) {
            this.alertCallback({
              detected: true,
              area: totalMotionArea,
              timestamp: Date.now()
            });
          }
          cv.putText(output, 'MOTION DETECTED', new cv.Point(10, 30),
            cv.FONT_HERSHEY_SIMPLEX, 1, [0, 0, 255, 255], 2);
        }

        // Bottom-left bar whose length/colour reflect the motion level.
        const motionLevel = Math.min(255, totalMotionArea / 100);
        cv.rectangle(output, new cv.Point(10, output.rows - 40),
          new cv.Point(10 + motionLevel, output.rows - 20),
          [0, motionLevel, 255 - motionLevel, 255], -1);

        diff.delete();
        thresh.delete();
        dilated.delete();
        element.delete(); // FIX: the structuring element Mat was leaked each frame
        contours.delete();
        hierarchy.delete();
      }

      // The blurred gray frame becomes the reference for the next iteration
      // (ownership transfer instead of clone+delete).
      if (this.previousFrame) {
        this.previousFrame.delete();
      }
      this.previousFrame = gray;

      cv.imshow(this.canvas, output);
      frame.delete();
      output.delete();
      requestAnimationFrame(processFrame);
    };
    processFrame();
  }

  // Summarizes the last 5 samples: percentage of frames with motion and the
  // mean motion area. Always returns { motionFrequency, averageMotionArea }.
  getMotionStatistics() {
    if (this.motionHistory.length === 0) {
      // FIX: use the same keys as the non-empty case (the original returned
      // { averageMotion, motionFrequency } here, which callers could not rely on).
      return { motionFrequency: 0, averageMotionArea: 0 };
    }
    const recent = this.motionHistory.slice(-5);
    const detectedCount = recent.filter((m) => m.detected).length;
    const averageArea = recent.reduce((sum, m) => sum + m.area, 0) / recent.length;
    return {
      motionFrequency: (detectedCount / recent.length) * 100,
      averageMotionArea: averageArea
    };
  }
}
// 5. Video Recorder
// Records the canvas contents to a WebM file via the MediaRecorder API and
// triggers a browser download when recording stops.
class VideoRecorder {
  constructor(canvasId) {
    this.canvas = document.getElementById(canvasId);
    this.mediaRecorder = null;
    this.recordedChunks = [];
    this.recording = false;
  }

  // Begin capturing the canvas at 30 FPS; no-op if already recording.
  startRecording() {
    if (this.recording) return;
    const stream = this.canvas.captureStream(30); // 30 FPS
    this.mediaRecorder = new MediaRecorder(stream, {
      mimeType: 'video/webm'
    });
    this.recordedChunks = [];
    this.mediaRecorder.ondataavailable = ({ data }) => {
      if (data.size > 0) this.recordedChunks.push(data);
    };
    this.mediaRecorder.onstop = () => {
      // Bundle the chunks into a Blob and download it via a temporary anchor.
      const blob = new Blob(this.recordedChunks, {
        type: 'video/webm'
      });
      const url = URL.createObjectURL(blob);
      const anchor = document.createElement('a');
      anchor.href = url;
      anchor.download = `recording_${Date.now()}.webm`;
      anchor.click();
      URL.revokeObjectURL(url);
    };
    this.mediaRecorder.start();
    this.recording = true;
    console.log('Recording started');
  }

  // Stop capture; the onstop handler above performs the download.
  stopRecording() {
    if (!this.recording || !this.mediaRecorder) return;
    this.mediaRecorder.stop();
    this.recording = false;
    console.log('Recording stopped');
  }

  // True while a recording session is active.
  isRecording() {
    return this.recording;
  }
}
// 6. Initialize all components
// Wires up the demo UI: polls until OpenCV.js is ready, then starts the video
// processor, face detector, motion detector and recorder, and binds buttons.
function initializeVideoProcessing() {
  console.log('Initializing Video Processing...');

  // OpenCV.js loads asynchronously; retry every 100ms until `cv` exists.
  if (typeof cv === 'undefined') {
    setTimeout(initializeVideoProcessing, 100);
    return;
  }

  // Live camera preview with selectable filters.
  const videoProcessor = new VideoProcessor('videoInput', 'canvasOutput');
  videoProcessor.startVideo();
  for (const btn of document.querySelectorAll('.filter-btn')) {
    btn.addEventListener('click', () => videoProcessor.setFilter(btn.dataset.filter));
  }
  document.getElementById('snapshotBtn').addEventListener('click', () => {
    videoProcessor.takeSnapshot();
  });

  // Face detection overlay (only starts if the cascades loaded).
  const faceDetector = new FaceDetectionVideo('videoInput', 'faceCanvas');
  faceDetector.initialize().then((success) => {
    if (success) faceDetector.startDetection();
  });

  // Motion detection with an on-page alert banner.
  const motionDetector = new MotionDetector('videoInput', 'motionCanvas');
  motionDetector.setAlertCallback((alert) => {
    console.log('Motion Alert:', alert);
    document.getElementById('motionAlert').textContent =
      `Motion detected! Area: ${alert.area.toFixed(0)} pixels`;
  });
  motionDetector.startDetection();

  // Record/stop toggle for the processed canvas.
  const videoRecorder = new VideoRecorder('canvasOutput');
  document.getElementById('recordBtn').addEventListener('click', () => {
    const recordBtn = document.getElementById('recordBtn');
    if (videoRecorder.isRecording()) {
      videoRecorder.stopRecording();
      recordBtn.textContent = 'Start Recording';
    } else {
      videoRecorder.startRecording();
      recordBtn.textContent = 'Stop Recording';
    }
  });

  console.log('Video processing initialized successfully');
}
// Export classes for external use
// CommonJS export guard: only active when the file is loaded under Node or a
// bundler (in a browser <script> tag, `module` is undefined and this is skipped).
if (typeof module !== 'undefined' && module.exports) {
module.exports = {
VideoProcessor,
FaceDetectionVideo,
ObjectTracker,
MotionDetector,
VideoRecorder
};
}
// Auto-initialize when page loads
// (initializeVideoProcessing itself retries until OpenCV.js is ready.)
document.addEventListener('DOMContentLoaded', () => {
initializeVideoProcessing();
});
console.log('OpenCV.js Video Processing module loaded');
💻 Applications Avancées OpenCV javascript
Applications avancées de vision par ordinateur incluant les effets AR, le scan de codes-barres et le stitching d'images