🎯 Recommended Collections
Balanced sample collections from various categories for you to explore
TensorFlow.js Examples
TensorFlow.js machine learning examples covering neural networks, model training, and tensor operations
💻 TensorFlow.js Hello World javascript
🟢 simple
⭐⭐
Basic TensorFlow.js setup and simple tensor operations
⏱️ 15 min
🏷️ tensorflow, machine learning, tensors
Prerequisites:
Basic JavaScript, Linear algebra basics
// TensorFlow.js Hello World
// Import TensorFlow.js
// In browser: <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@latest/dist/tf.min.js"></script>
// In Node.js: const tf = require('@tensorflow/tfjs');
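// For better performance in Node.js, the native binding can be used instead
// (requires installing the optional @tensorflow/tfjs-node package):
// const tf = require('@tensorflow/tfjs-node');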
// 1. Basic tensor creation
console.log('=== Basic Tensors ===');
const scalar = tf.scalar(5);
console.log('Scalar:', scalar.toString());
const vector = tf.tensor1d([1, 2, 3, 4]);
console.log('Vector:', vector.toString());
const matrix = tf.tensor2d([[1, 2], [3, 4]]);
console.log('Matrix:', matrix.toString());
// 2. Tensor operations
console.log('\n=== Basic Operations ===');
const a = tf.tensor1d([1, 2, 3]);
const b = tf.tensor1d([4, 5, 6]);
const add = a.add(b);
console.log('Add:', add.toString());
const multiply = a.mul(b);
console.log('Multiply:', multiply.toString());
const squared = a.square();
console.log('Square:', squared.toString());
// 3. Matrix multiplication
console.log('\n=== Matrix Operations ===');
const mat1 = tf.tensor2d([[1, 2], [3, 4]]);
const mat2 = tf.tensor2d([[5, 6], [7, 8]]);
const matMul = mat1.matMul(mat2);
console.log('Matrix Multiplication:', matMul.toString());
// 4. Mathematical functions
console.log('\n=== Mathematical Functions ===');
const x = tf.tensor1d([-1, 0, 1, 2]);
const relu = x.relu();
console.log('ReLU:', relu.toString());
const sigmoid = tf.sigmoid(x);
console.log('Sigmoid:', sigmoid.toString());
const tanh = tf.tanh(x);
console.log('Tanh:', tanh.toString());
// 5. Reshaping tensors
console.log('\n=== Reshaping ===');
const tensor = tf.tensor1d([1, 2, 3, 4, 5, 6]);
const reshaped = tensor.reshape([2, 3]);
console.log('Original:', tensor.toString());
console.log('Reshaped (2x3):', reshaped.toString());
// 6. Reduction operations
console.log('\n=== Reductions ===');
const data = tf.tensor1d([1, 2, 3, 4, 5]);
const sum = data.sum();
console.log('Sum:', sum.toString());
const mean = data.mean();
console.log('Mean:', mean.toString());
const max = data.max();
console.log('Max:', max.toString());
// 7. Creating tensors with specific values
console.log('\n=== Special Tensors ===');
const zeros = tf.zeros([3, 3]);
console.log('Zeros (3x3):', zeros.toString());
const ones = tf.ones([2, 4]);
console.log('Ones (2x4):', ones.toString());
const identity = tf.eye(3);
console.log('Identity (3x3):', identity.toString());
const random = tf.randomNormal([2, 3]);
console.log('Random Normal (2x3):', random.toString());
// 8. Using variables (mutable tensors)
console.log('\n=== Variables ===');
const variable = tf.variable(tf.tensor1d([1, 2, 3]));
console.log('Initial variable:', variable.toString());
variable.assign(tf.tensor1d([4, 5, 6]));
console.log('Updated variable:', variable.toString());
// 9. Memory management
console.log('\n=== Memory Management ===');
// Use tf.tidy() to automatically dispose intermediate tensors
const result = tf.tidy(() => {
  const x = tf.tensor1d([1, 2, 3]);
  const y = tf.tensor1d([4, 5, 6]);
  return x.add(y).square();
});
console.log('Complex operation result:', result.toString());
// Don't forget to dispose tensors when done
scalar.dispose();
vector.dispose();
matrix.dispose();
a.dispose();
b.dispose();
add.dispose();
multiply.dispose();
squared.dispose();
mat1.dispose();
mat2.dispose();
matMul.dispose();
x.dispose();
relu.dispose();
sigmoid.dispose();
tanh.dispose();
tensor.dispose();
reshaped.dispose();
data.dispose();
sum.dispose();
mean.dispose();
max.dispose();
zeros.dispose();
ones.dispose();
identity.dispose();
random.dispose();
variable.dispose();
result.dispose();
console.log('\nAll tensors disposed successfully!');
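// tf.memory() reports how many tensors are still allocated - useful for spotting leaks
console.log('Tensors still in memory:', tf.memory().numTensors);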
// 10. Asynchronous operations
async function asyncExample() {
  console.log('\n=== Async Operations ===');
  // Use tf.data API for data processing
  const data = tf.data.array([1, 2, 3, 4, 5]);
  const processed = await data.map(x => x * 2).batch(2).toArray();
  console.log('Batched data:', processed);
}
asyncExample().catch(console.error);
💻 Simple Neural Network javascript
🟡 intermediate
⭐⭐⭐
Building and training a simple neural network for classification
⏱️ 25 min
🏷️ tensorflow, neural network, classification
Prerequisites:
Basic TensorFlow.js, Neural networks concepts
// Simple Neural Network with TensorFlow.js
// 1. Create synthetic training data
function createTrainingData() {
  const data = [];
  const labels = [];
  // Generate data for two classes (labels are one-hot encoded)
  for (let i = 0; i < 100; i++) {
    // Class 0: points near (0, 0)
    data.push([Math.random() * 2 - 1, Math.random() * 2 - 1]);
    labels.push([1, 0]);
    // Class 1: points near (5, 5)
    data.push([Math.random() * 2 + 4, Math.random() * 2 + 4]);
    labels.push([0, 1]);
  }
  return {
    data: tf.tensor2d(data),
    labels: tf.tensor2d(labels)
  };
}
// 2. Create the model
function createModel() {
  const model = tf.sequential();
  // First hidden layer (receives the 2-dimensional input)
  model.add(tf.layers.dense({
    units: 8,
    activation: 'relu',
    inputShape: [2]
  }));
  // Second hidden layer
  model.add(tf.layers.dense({
    units: 8,
    activation: 'relu'
  }));
  // Output layer (2 classes)
  model.add(tf.layers.dense({
    units: 2,
    activation: 'softmax'
  }));
  // Compile the model
  model.compile({
    optimizer: tf.train.adam(0.01),
    loss: 'categoricalCrossentropy',
    metrics: ['accuracy']
  });
  return model;
}
// 3. Train the model
async function trainModel(model, trainingData) {
  console.log('Starting training...');
  const history = await model.fit(trainingData.data, trainingData.labels, {
    epochs: 50,
    batchSize: 16,
    validationSplit: 0.2,
    shuffle: true,
    callbacks: {
      onEpochEnd: (epoch, logs) => {
        console.log(`Epoch ${epoch + 1}: loss = ${logs.loss.toFixed(4)}, accuracy = ${(logs.acc * 100).toFixed(2)}%`);
      }
    }
  });
  return history;
}
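// In the browser, awaiting tf.nextFrame() inside onEpochEnd keeps the UI
// responsive during long training runs, e.g.:
// onEpochEnd: async (epoch, logs) => { /* log metrics */ await tf.nextFrame(); }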
// 4. Make predictions
function makePredictions(model) {
  console.log('\nMaking predictions...');
  // Test points
  const testPoints = [
    [0, 0],      // Should be class 0
    [5, 5],      // Should be class 1
    [1, 1],      // Should be class 0
    [4.5, 4.5]   // Should be class 1
  ];
  testPoints.forEach(point => {
    const input = tf.tensor2d([point]);
    const prediction = model.predict(input);
    const probabilities = prediction.dataSync();
    const predictedClass = probabilities[0] > probabilities[1] ? 0 : 1;
    console.log(`Point [${point[0]}, ${point[1]}]: Class ${predictedClass} (Probabilities: [${probabilities[0].toFixed(3)}, ${probabilities[1].toFixed(3)}])`);
    input.dispose();
    prediction.dispose();
  });
}
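// Tip: dataSync() blocks until the values are available; in the browser the
// asynchronous alternative avoids stalling the UI thread:
// const probabilities = await prediction.data();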
// 5. Save and load model
async function saveAndLoadModel(model) {
  console.log('\nSaving and loading model...');
  // Save model to local storage
  await model.save('localstorage://my-model');
  console.log('Model saved to local storage');
  // Load model from local storage
  const loadedModel = await tf.loadLayersModel('localstorage://my-model');
  console.log('Model loaded from local storage');
  return loadedModel;
}
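// Note: the 'localstorage://' scheme only works in the browser. TF.js also
// supports 'indexeddb://' in the browser and, with @tensorflow/tfjs-node,
// 'file://' paths for saving to disk, e.g.:
// await model.save('indexeddb://my-model');
// await model.save('file://./my-model'); // Node.js only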
// Main execution
async function main() {
  try {
    // Create and prepare data
    const trainingData = createTrainingData();
    console.log('Training data created:', trainingData.data.shape, trainingData.labels.shape);
    // Create model
    const model = createModel();
    console.log('Model architecture:');
    model.summary();
    // Train model
    await trainModel(model, trainingData);
    // Make predictions
    makePredictions(model);
    // Save and load model
    const loadedModel = await saveAndLoadModel(model);
    console.log('\nPrediction with loaded model:');
    makePredictions(loadedModel);
    // Clean up
    trainingData.data.dispose();
    trainingData.labels.dispose();
    model.dispose();
    loadedModel.dispose();
  } catch (error) {
    console.error('Error:', error);
  }
}
// Run the example
main();
💻 Linear Regression javascript
🟡 intermediate
⭐⭐⭐
Implementing linear regression to predict continuous values
⏱️ 20 min
🏷️ tensorflow, regression, prediction
Prerequisites:
Basic TensorFlow.js, Regression concepts
// Linear Regression with TensorFlow.js
// 1. Generate synthetic data
function generateLinearData(numPoints = 100) {
  const x = [];
  const y = [];
  // y = 2x + 1 + noise
  for (let i = 0; i < numPoints; i++) {
    const xVal = Math.random() * 10;
    const noise = (Math.random() - 0.5) * 2;
    const yVal = 2 * xVal + 1 + noise;
    x.push(xVal);
    y.push(yVal);
  }
  return {
    x: tf.tensor2d(x, [x.length, 1]),
    y: tf.tensor2d(y, [y.length, 1])
  };
}
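// Optional: scaling inputs to a small range (e.g. [0, 1]) often speeds up
// SGD convergence; a minimal sketch using the tensor min/max ops:
// const xMin = data.x.min(), xMax = data.x.max();
// const xNorm = data.x.sub(xMin).div(xMax.sub(xMin));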
// 2. Create linear regression model
function createLinearModel() {
  const model = tf.sequential();
  // Single neuron for linear regression
  model.add(tf.layers.dense({
    units: 1,
    inputShape: [1]
  }));
  // Compile with appropriate loss function for regression
  model.compile({
    optimizer: tf.train.sgd(0.01),
    loss: 'meanSquaredError'
  });
  return model;
}
// 3. Train the model
async function trainLinearRegression(model, data) {
  console.log('Training linear regression model...');
  const history = await model.fit(data.x, data.y, {
    epochs: 100,
    batchSize: 32,
    callbacks: {
      onEpochEnd: (epoch, logs) => {
        if (epoch % 10 === 0) {
          console.log(`Epoch ${epoch}: loss = ${logs.loss.toFixed(4)}`);
        }
      }
    }
  });
  return history;
}
// 4. Evaluate and visualize results
function evaluateModel(model, data) {
  console.log('\nEvaluating model...');
  // The learned equation is only meaningful for the single-neuron linear model
  if (model.layers.length === 1) {
    const weights = model.getWeights();
    const weight = weights[0].dataSync()[0];
    const bias = weights[1].dataSync()[0];
    console.log(`Learned equation: y = ${weight.toFixed(3)}x + ${bias.toFixed(3)}`);
    console.log('Expected equation: y = 2.000x + 1.000');
  }
  // Make predictions on test data
  const testX = tf.tensor2d([[0], [5], [10]]);
  const predictions = model.predict(testX);
  const testXValues = testX.dataSync();
  const predictionValues = predictions.dataSync();
  console.log('\nPredictions:');
  testXValues.forEach((x, i) => {
    console.log(`x = ${x}, predicted y = ${predictionValues[i].toFixed(3)}`);
  });
  // Calculate R-squared = 1 - (residual sum of squares / total sum of squares)
  // inside tf.tidy() so intermediate tensors are released automatically
  const rSquared = tf.tidy(() => {
    const yPred = model.predict(data.x);
    const yMean = data.y.mean();
    const totalSumSquares = data.y.sub(yMean).square().sum();
    const residualSumSquares = data.y.sub(yPred).square().sum();
    return tf.scalar(1).sub(residualSumSquares.div(totalSumSquares));
  });
  console.log(`R-squared: ${rSquared.dataSync()[0].toFixed(4)}`);
  // Clean up
  testX.dispose();
  predictions.dispose();
  rSquared.dispose();
}
// 5. Plot training progress (browser example)
function setupTrainingVisualization(model, data) {
  console.log('\nSetting up visualization (browser)...');
  // This would typically be used with a charting library like Chart.js
  // For demonstration, we'll just log the data
  const predictions = model.predict(data.x);
  const xValues = data.x.dataSync();
  const yValues = data.y.dataSync();
  const predValues = predictions.dataSync();
  console.log('Sample points for visualization:');
  for (let i = 0; i < Math.min(10, data.x.shape[0]); i++) {
    console.log(`(${xValues[i].toFixed(2)}, ${yValues[i].toFixed(2)}) -> predicted: ${predValues[i].toFixed(2)}`);
  }
  predictions.dispose();
}
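// For actual charts in the browser, the companion @tensorflow/tfjs-vis package
// can render scatter plots and training curves; a minimal sketch (assumes the
// package is loaded as `tfvis` and `points` is an array of {x, y} objects):
// tfvis.render.scatterplot({ name: 'Linear fit' }, { values: points });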
// 6. Advanced: Polynomial regression
function createPolynomialModel(degree = 2) {
  // Rather than building explicit polynomial features, this uses a small MLP
  // that can approximate non-linear relationships (the degree argument is
  // currently unused)
  const model = tf.sequential();
  model.add(tf.layers.dense({
    units: 10,
    activation: 'relu',
    inputShape: [1]
  }));
  model.add(tf.layers.dense({
    units: 10,
    activation: 'relu'
  }));
  model.add(tf.layers.dense({
    units: 1
  }));
  model.compile({
    optimizer: tf.train.adam(0.01),
    loss: 'meanSquaredError'
  });
  return model;
}
// Main execution
async function main() {
  try {
    // Generate linear data
    const data = generateLinearData(100);
    console.log('Generated data shape:', data.x.shape, data.y.shape);
    // Create and train linear model
    const linearModel = createLinearModel();
    console.log('\nLinear Model Architecture:');
    linearModel.summary();
    await trainLinearRegression(linearModel, data);
    evaluateModel(linearModel, data);
    setupTrainingVisualization(linearModel, data);
    // Try polynomial regression for comparison
    console.log('\n=== Polynomial Regression Comparison ===');
    const polyModel = createPolynomialModel(2);
    console.log('Polynomial Model Architecture:');
    polyModel.summary();
    await trainLinearRegression(polyModel, data);
    console.log('\nPolynomial model evaluation:');
    evaluateModel(polyModel, data);
    // Clean up
    data.x.dispose();
    data.y.dispose();
    linearModel.dispose();
    polyModel.dispose();
  } catch (error) {
    console.error('Error:', error);
  }
}
// Run the example
main();