🎯 Exemples recommandés
Collections d'exemples équilibrées, issues de différentes catégories, à explorer
Exemples Google Cloud Platform
Exemples de services Google Cloud Platform incluant Cloud Functions, Cloud Storage, Firestore et Cloud Run
💻 Cloud Functions HTTP Trigger GCP python
🟢 simple
⭐
Cloud Function déclenchée par HTTP avec authentification et support CORS
⏱️ 10 min
🏷️ gcp, cloud functions, http, python, serverless
Prerequisites:
Google Cloud account, Cloud Functions enabled, Python 3.8+
# Google Cloud Functions HTTP Trigger Example
# Python - main.py + requirements.txt
# main.py
import functions_framework
import json
import os
from flask import Flask, request, jsonify
from google.cloud import pubsub_v1
from datetime import datetime
# Initialize Pub/Sub publisher
# Created once at module import (cold start) so the client and its gRPC
# channel are reused across warm invocations of the function.
publisher = pubsub_v1.PublisherClient()
@functions_framework.http
def hello_http(request):
    """HTTP Cloud Function for handling API requests.

    Answers CORS preflight requests, greets the caller by name (taken
    from the JSON body or the query string), and optionally publishes an
    event to Pub/Sub when the body contains a truthy ``publish`` flag.
    """
    # CORS preflight: reply immediately with the allowed methods/headers.
    if request.method == 'OPTIONS':
        preflight_headers = {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
            'Access-Control-Allow-Headers': 'Content-Type, Authorization',
            'Access-Control-Max-Age': '3600',
        }
        return ('', 204, preflight_headers)

    # Headers attached to every non-preflight response.
    headers = {
        'Access-Control-Allow-Origin': '*',
        'Content-Type': 'application/json',
    }

    try:
        payload = request.get_json(silent=True)
        query = request.args

        # Resolve the greeting target: JSON body wins over query string.
        if payload and 'name' in payload:
            name = payload['name']
        elif query and 'name' in query:
            name = query['name']
        else:
            name = 'World'

        print(f"Request received from: {request.remote_addr}")
        print(f"Request method: {request.method}")
        print(f"Request data: {payload}")

        response_data = {
            'message': f'Hello, {name}! This is a Google Cloud Function.',
            'timestamp': datetime.now().isoformat(),
            'function_name': os.environ.get('FUNCTION_NAME', 'hello_http'),
            'version': os.environ.get('FUNCTION_VERSION', 'v1'),
            'request_id': request.headers.get('X-Cloud-Trace-Context', 'unknown'),
        }

        # Optional fan-out: publish an event when the caller asks for it.
        if payload and 'publish' in payload and payload['publish']:
            topic_path = publisher.topic_path(
                os.environ.get('GCP_PROJECT', 'your-project'),
                'function-events',
            )
            event_bytes = json.dumps({
                'event': 'http_request',
                'name': name,
                'timestamp': datetime.now().isoformat(),
            }).encode('utf-8')
            publish_future = publisher.publish(topic_path, event_bytes)
            # Blocks until Pub/Sub acknowledges and returns the message id.
            response_data['message_id'] = publish_future.result()

        return (jsonify(response_data), 200, headers)
    except Exception as e:
        error_response = {
            'error': 'Internal server error',
            'message': str(e),
            'timestamp': datetime.now().isoformat(),
        }
        return (jsonify(error_response), 500, headers)
# requirements.txt
"""
functions-framework==3.*
flask==2.*
google-cloud-pubsub==2.*
google-cloud-storage==2.*
google-cloud-firestore==2.*
cryptography==41.*
"""
💻 Opérations CRUD Firestore nodejs
🟡 intermediate
⭐⭐⭐
Opérations CRUD complètes avec base de données Google Cloud Firestore
⏱️ 25 min
🏷️ gcp, firestore, database, crud, nodejs
Prerequisites:
Google Cloud project, Firestore database enabled, Node.js knowledge
// Google Cloud Firestore CRUD Operations
// Node.js - package.json + functions/package.json + index.js
// package.json (project root)
{
"name": "firestore-crud-functions",
"version": "1.0.0",
"description": "Firestore CRUD operations with Cloud Functions",
"scripts": {
"start": "node index.js",
"deploy": "gcloud functions deploy"
}
}
// functions/package.json
{
"name": "functions",
"version": "1.0.0",
"dependencies": {
"@google-cloud/firestore": "^6.0.0",
"@google-cloud/functions-framework": "^3.0.0",
"firebase-admin": "^11.0.0",
"express": "^4.18.0"
}
}
// functions/index.js
const functions = require('@google-cloud/functions-framework');
const admin = require('firebase-admin');
const express = require('express');
// Initialize Firebase Admin SDK using Application Default Credentials.
admin.initializeApp({
  credential: admin.credential.applicationDefault(),
  projectId: process.env.GCLOUD_PROJECT || 'your-project-id',
});

const db = admin.firestore();
const app = express();
app.use(express.json());

// Middleware to set CORS headers on every response and short-circuit
// OPTIONS preflight requests.
// NOTE(review): this Express app is never wired to the functions.http
// handlers below — confirm whether it is still needed.
app.use((req, res, next) => {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
  res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
  if (req.method === 'OPTIONS') {
    res.sendStatus(204);
    return;
  }
  next();
});
// CREATE - Add new document
// Registers an HTTP-triggered function that inserts a user document,
// rejecting duplicates keyed on the email address.
functions.http('createUser', async (req, res) => {
  try {
    const { email, name, role = 'user', ...additionalData } = req.body;

    // Both fields are mandatory.
    if (!email || !name) {
      return res.status(400).json({
        error: 'Email and name are required',
        message: 'Please provide both email and name fields',
      });
    }

    // Reject the request when a user with this email already exists.
    const duplicates = await db.collection('users').where('email', '==', email).get();
    if (!duplicates.empty) {
      return res.status(409).json({
        error: 'User already exists',
        message: 'A user with this email already exists',
      });
    }

    // NOTE(review): spreading additionalData persists arbitrary
    // client-supplied fields (mass assignment) — confirm this is intended.
    const userRef = await db.collection('users').add({
      email,
      name,
      role,
      createdAt: admin.firestore.FieldValue.serverTimestamp(),
      updatedAt: admin.firestore.FieldValue.serverTimestamp(),
      status: 'active',
      ...additionalData,
    });

    // Read the document back so the response includes server timestamps.
    const snapshot = await userRef.get();
    res.status(201).json({
      success: true,
      message: 'User created successfully',
      data: {
        id: snapshot.id,
        ...snapshot.data(),
      },
    });
  } catch (error) {
    console.error('Error creating user:', error);
    res.status(500).json({
      error: 'Failed to create user',
      message: error.message,
    });
  }
});
// READ - Get single document
// Fetches one user by ID and returns it, or 404 when absent.
functions.http('getUser', async (req, res) => {
  try {
    // BUG FIX: functions-framework registers the handler without an
    // Express route pattern, so req.params is always empty and the old
    // `const { userId } = req.params` lookup made this endpoint return
    // 400 for every request. Accept the ID from the query string
    // (?userId=...) or as the first path segment (/<userId>).
    const userId =
      req.query.userId ?? req.path.split('/').filter(Boolean)[0];

    if (!userId) {
      return res.status(400).json({
        error: 'User ID is required',
        message: 'Please provide userId parameter',
      });
    }

    const userDoc = await db.collection('users').doc(userId).get();
    if (!userDoc.exists) {
      return res.status(404).json({
        error: 'User not found',
        message: 'No user found with the provided ID',
      });
    }

    res.status(200).json({
      success: true,
      data: {
        id: userDoc.id,
        ...userDoc.data(),
      },
    });
  } catch (error) {
    console.error('Error getting user:', error);
    res.status(500).json({
      error: 'Failed to get user',
      message: error.message,
    });
  }
});
💻 Intégration Cloud Storage python
🟡 intermediate
⭐⭐⭐
Télécharger, téléverser et gérer des fichiers dans Google Cloud Storage
⏱️ 30 min
🏷️ gcp, cloud storage, file management, upload, download
Prerequisites:
Google Cloud Storage bucket, Python 3.8+, File upload knowledge
# Google Cloud Storage Integration
# Python - main.py + requirements.txt
import os
import functions_framework
import tempfile
import uuid
from datetime import datetime, timedelta
from google.cloud import storage
from google.cloud.exceptions import GoogleCloudError
import magic
from werkzeug.utils import secure_filename
# Initialize Cloud Storage client
# Module-level client so warm invocations reuse the HTTP connection pool.
storage_client = storage.Client()
# Upload policy: extension whitelist and maximum accepted payload size.
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'csv', 'docx', 'xlsx'}
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB

def allowed_file(filename):
    """Return True when *filename* carries a whitelisted extension."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
def get_file_type(file_path):
    """Detect a file's MIME type with python-magic.

    Falls back to the generic binary type when detection fails
    (missing libmagic, unreadable file, etc.).
    """
    try:
        return magic.from_file(file_path, mime=True)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit and KeyboardInterrupt.
        return 'application/octet-stream'
@functions_framework.http
def upload_file(request):
    """Upload a file to Google Cloud Storage.

    Expects a multipart POST with a ``file`` field. Validates the
    extension and size, detects the MIME type, stores the object under a
    UUID-prefixed name, and returns a one-hour V4 signed URL.
    """
    if request.method != 'POST':
        return {'error': 'Only POST method allowed'}, 405

    try:
        # Get file from request
        if 'file' not in request.files:
            return {'error': 'No file provided'}, 400

        file = request.files['file']
        if file.filename == '':
            return {'error': 'No file selected'}, 400

        # Validate file extension against the whitelist.
        if not allowed_file(file.filename):
            return {'error': 'File type not allowed'}, 400

        file_content = file.read()
        if len(file_content) > MAX_FILE_SIZE:
            return {'error': 'File too large (max 10MB)'}, 413

        # BUG FIX: the unique name was f"{uuid.uuid4()}_(unknown)" — a
        # garbled template that dropped the original filename. Keep the
        # sanitized name after the UUID prefix.
        filename = secure_filename(file.filename)
        unique_filename = f"{uuid.uuid4()}_{filename}"

        # Get bucket name from environment or use default
        bucket_name = os.environ.get('STORAGE_BUCKET', 'your-bucket-name')
        bucket = storage_client.bucket(bucket_name)

        # Write to a temp file so python-magic can sniff the MIME type.
        # BUG FIX: cleanup now happens in a finally block, so the temp
        # file is removed even when detection raises.
        temp_path = None
        try:
            with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                temp_path = temp_file.name
                temp_file.write(file_content)
                temp_file.flush()
            content_type = get_file_type(temp_path)
        finally:
            if temp_path and os.path.exists(temp_path):
                os.unlink(temp_path)

        file_size = len(file_content)

        # BUG FIX: metadata must be set BEFORE the upload; assigning
        # blob.metadata after upload_from_string only changes the local
        # object and was never persisted (that would need blob.patch()).
        blob = bucket.blob(unique_filename)
        blob.metadata = {
            'original_filename': filename,
            'upload_time': datetime.now().isoformat(),
            'uploaded_by': 'cloud_function',
            'file_size': str(file_size)
        }
        blob.upload_from_string(
            file_content,
            content_type=content_type
        )

        # V4 signed URL valid for one hour.
        # BUG FIX: pass a timedelta — a naive datetime.now() is treated
        # as UTC by the client library, skewing expiry on non-UTC hosts.
        signed_url = blob.generate_signed_url(
            version="v4",
            expiration=timedelta(hours=1),
            method="GET"
        )

        return {
            'success': True,
            'message': 'File uploaded successfully',
            'data': {
                'file_id': unique_filename,
                'original_filename': filename,
                'file_size': file_size,
                'content_type': content_type,
                'signed_url': signed_url,
                'public_url': blob.public_url if hasattr(blob, 'public_url') else None,
                'gs_uri': f"gs://{bucket_name}/{unique_filename}"
            }
        }, 200
    except GoogleCloudError as e:
        print(f"Google Cloud error: {e}")
        return {
            'error': 'Failed to upload file to Cloud Storage',
            'message': str(e)
        }, 500
    except Exception as e:
        print(f"Unexpected error: {e}")
        return {
            'error': 'Internal server error',
            'message': str(e)
        }, 500
# requirements.txt
"""
functions-framework==3.*
google-cloud-storage==2.*
python-magic==0.4.*
werkzeug==2.*
"""
💻 Service Cloud Run Container dockerfile
🔴 complex
⭐⭐⭐⭐
Déployer des applications conteneurisées sur Cloud Run avec auto-scaling
⏱️ 40 min
🏷️ gcp, cloud run, containers, docker, python, auto-scaling
Prerequisites:
Google Cloud account, Docker knowledge, Container orchestration basics, Cloud Run enabled
# Google Cloud Run Container Example
# Multi-language container with API service
# Dockerfile
FROM python:3.11-slim

# Runtime environment: no .pyc files, unbuffered logs, Cloud Run port.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PORT=8080

# Set work directory
WORKDIR /app

# System dependencies: gcc for building wheels, curl for the healthcheck.
# BUG FIX: curl is not present in python:*-slim, so the HEALTHCHECK
# below always failed and marked the container unhealthy.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies first to leverage the Docker layer cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Run as a non-root user.
RUN adduser --disabled-password --gecos '' appuser
RUN chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 8080

# Health check (NOTE: Cloud Run ignores HEALTHCHECK; useful for local runs).
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8080/health || exit 1

# Run the application
CMD ["gunicorn", "--bind", "0.0.0.0:8080", "--workers", "1", "--timeout", "0", "--preload", "app:app"]
# requirements.txt
"""
Flask==2.3.*
gunicorn==21.*
google-cloud-logging==3.*
google-cloud-monitoring==2.*
google-cloud-trace==1.*
google-cloud-pubsub==2.*
redis==4.*
sqlalchemy==2.*
cryptography==41.*
python-jose==3.*
passlib==1.*
pydantic==2.*
prometheus-client==0.17.*
"""
# app.py
from flask import Flask, request, jsonify
import logging
import os
import uuid
import time
from datetime import datetime, timedelta
import google.cloud.logging
import redis
from prometheus_client import Counter, Histogram, generate_latest, CONTENT_TYPE_LATEST
# Initialize Flask app
app = Flask(__name__)

# Baseline stdlib logging; Cloud Logging (below) upgrades it when available.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Attach Google Cloud Logging when running on GCP; degrade gracefully to
# stdlib logging elsewhere (e.g. local development).
try:
    cloud_logging_client = google.cloud.logging.Client()
    cloud_logging_client.setup_logging()
    logger.info("Cloud Logging initialized")
except Exception as e:
    logger.warning(f"Cloud Logging not available: {e}")

# Prometheus metrics exposed via the /metrics endpoint.
REQUEST_COUNT = Counter('http_requests_total', 'Total HTTP requests', ['method', 'endpoint', 'status'])
REQUEST_DURATION = Histogram('http_request_duration_seconds', 'HTTP request duration')
# Cloud Run injects K_REVISION; 'unknown' outside Cloud Run.
CLOUD_RUN_REVISION = os.environ.get('K_REVISION', 'unknown')

# Optional Redis cache; the service runs without it (redis_client = None).
try:
    redis_client = redis.Redis(
        host=os.environ.get('REDIS_HOST', 'localhost'),
        port=int(os.environ.get('REDIS_PORT', 6379)),
        decode_responses=True,
    )
    redis_client.ping()
    logger.info("Redis connection established")
except Exception as e:
    logger.warning(f"Redis not available: {e}")
    redis_client = None
@app.route('/health', methods=['GET'])
def health_check():
    """Health check endpoint for Cloud Run.

    Reports overall service status plus the Redis connection state; a
    Redis outage downgrades the status to 'degraded' but still returns
    HTTP 200.
    """
    report = {
        'status': 'healthy',
        'timestamp': datetime.now().isoformat(),
        'revision': CLOUD_RUN_REVISION,
        'version': '1.0.0',
    }

    if redis_client:
        try:
            redis_client.ping()
            report['redis'] = 'connected'
        except Exception:
            report['redis'] = 'disconnected'
            report['status'] = 'degraded'
    else:
        report['redis'] = 'not_configured'

    return jsonify(report), 200
@app.route('/metrics', methods=['GET'])
def metrics():
    """Expose Prometheus metrics in the text exposition format."""
    headers = {'Content-Type': CONTENT_TYPE_LATEST}
    return generate_latest(), 200, headers
@app.route('/', methods=['GET'])
def hello_world():
    """Root endpoint: greet the caller and echo Cloud Run instance metadata."""
    trace_id = request.headers.get('X-Cloud-Trace-Context', 'unknown')
    body = {
        'message': 'Hello from Cloud Run!',
        'timestamp': datetime.now().isoformat(),
        'trace_id': trace_id,
        'instance_id': os.environ.get('K_CONFIGURATION', 'unknown'),
        'revision': CLOUD_RUN_REVISION,
        'pod_name': os.environ.get('HOSTNAME', 'unknown'),
    }
    logger.info(f"Hello request from {request.remote_addr}")
    return jsonify(body)
@app.route('/api/data', methods=['GET', 'POST'])
def handle_data():
    """Dispatch /api/data to the GET or POST handler."""
    handler = handle_get_data if request.method == 'GET' else handle_post_data
    return handler()
def handle_get_data():
    """Return a static item listing (stands in for a database query)."""
    try:
        items = [
            {'id': 1, 'name': 'Item 1', 'value': 'Value 1'},
            {'id': 2, 'name': 'Item 2', 'value': 'Value 2'},
            {'id': 3, 'name': 'Item 3', 'value': 'Value 3'},
        ]
        payload = {
            'items': items,
            'total': 3,
            'timestamp': datetime.now().isoformat(),
            'cached': redis_client is not None,
        }
        logger.info(f"Retrieved {len(payload['items'])} items")
        return jsonify(payload), 200
    except Exception as e:
        logger.error(f"Error getting data: {e}")
        return jsonify({'error': 'Internal server error'}), 500
def handle_post_data():
    """Create a new item from the JSON body (simulated persistence)."""
    try:
        payload = request.get_json()
        if not payload or 'item' not in payload:
            return jsonify({'error': 'item field is required'}), 400

        item = payload['item']
        new_item = {
            'id': uuid.uuid4().hex[:8],
            'name': item.get('name', 'Unknown'),
            'value': item.get('value', ''),
            'created_at': datetime.now().isoformat(),
            'created_by': 'cloud_run_app',
        }

        # Drop the whole cache DB so stale listings are never served.
        if redis_client:
            redis_client.flushdb()
            logger.info("Cache invalidated after data creation")

        logger.info(f"Created new item: {new_item['id']}")
        return jsonify({
            'success': True,
            'message': 'Item created successfully',
            'item': new_item
        }), 201
    except Exception as e:
        logger.error(f"Error creating data: {e}")
        return jsonify({'error': 'Internal server error'}), 500
# cloudbuild.yaml for automated deployment
# $PROJECT_ID and $BUILD_ID are Cloud Build default substitutions.
steps:
  # Build the container image
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', 'gcr.io/$PROJECT_ID/my-cloud-run-app:$BUILD_ID', '.']
  # Push the container image to Container Registry
  - name: 'gcr.io/cloud-builders/docker'
    args: ['push', 'gcr.io/$PROJECT_ID/my-cloud-run-app:$BUILD_ID']
  # Deploy container image to Cloud Run
  # Note: --allow-unauthenticated makes the service publicly reachable.
  - name: 'gcr.io/cloud-builders/gcloud'
    args:
      - 'run'
      - 'deploy'
      - 'my-cloud-run-app'
      - '--image'
      - 'gcr.io/$PROJECT_ID/my-cloud-run-app:$BUILD_ID'
      - '--region'
      - 'us-central1'
      - '--platform'
      - 'managed'
      - '--allow-unauthenticated'
      - '--max-instances'
      - '100'
      - '--min-instances'
      - '0'
      - '--memory'
      - '256Mi'
      - '--cpu'
      - '1'
      - '--timeout'
      - '60s'
      - '--concurrency'
      - '10'
images:
  - 'gcr.io/$PROJECT_ID/my-cloud-run-app:$BUILD_ID'