🎯 Recommended Examples
A balanced selection of samples from several categories for you to explore
Serverless Architecture Examples
Serverless compute examples with AWS Lambda, Azure Functions, and Google Cloud Functions
💻 AWS Lambda Hello World javascript
🟢 simple
⭐⭐
Basic AWS Lambda function using the Node.js runtime
⏱️ 10 min
🏷️ serverless, aws, lambda, nodejs
Prerequisites:
Basic Node.js knowledge
// AWS Lambda Hello World Example
exports.handler = async (event) => {
  console.log('Event: ', JSON.stringify(event, null, 2));

  const response = {
    statusCode: 200,
    headers: {
      'Content-Type': 'application/json',
      'Access-Control-Allow-Origin': '*',
    },
    body: JSON.stringify({
      message: 'Hello from AWS Lambda!',
      input: event,
      timestamp: new Date().toISOString(),
    }),
  };

  return response;
};

// Handler with different HTTP methods
exports.apiHandler = async (event) => {
  const httpMethod = event.httpMethod;
  const path = event.path;

  switch (httpMethod) {
    case 'GET':
      return {
        statusCode: 200,
        body: JSON.stringify({
          message: 'GET request received',
          path: path,
        }),
      };
    case 'POST': {
      const requestBody = JSON.parse(event.body || '{}');
      return {
        statusCode: 201,
        body: JSON.stringify({
          message: 'POST request processed',
          receivedData: requestBody,
        }),
      };
    }
    default:
      return {
        statusCode: 405,
        body: JSON.stringify({
          error: 'Method not allowed',
        }),
      };
  }
};

// Lambda function with environment variables
exports.envHandler = async (event) => {
  const environment = process.env.ENVIRONMENT || 'development';
  const region = process.env.AWS_REGION || 'us-east-1';

  return {
    statusCode: 200,
    body: JSON.stringify({
      environment,
      region,
      allEnvVars: Object.keys(process.env).filter(key =>
        key.startsWith('AWS_') || key.startsWith('LAMBDA_')
      ),
    }),
  };
};
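Once deployed, any of these handlers can be exercised directly, without an API Gateway in front, by sending it a synthetic event. A minimal sketch using boto3; the function name my-hello-function is a placeholder, and AWS credentials are assumed to be configured locally.

# Invoke a deployed Lambda with a minimal API Gateway-style test event.
# Assumes boto3 is installed; "my-hello-function" is a placeholder name.
import json
import boto3

lambda_client = boto3.client('lambda', region_name='us-east-1')

test_event = {
    'httpMethod': 'GET',
    'path': '/hello',
    'body': None,
}

response = lambda_client.invoke(
    FunctionName='my-hello-function',
    InvocationType='RequestResponse',
    Payload=json.dumps(test_event).encode('utf-8'),
)

payload = json.loads(response['Payload'].read())
print(payload['statusCode'], payload['body'])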
⚙️ Serverless Framework Configuration yaml
🟡 intermediate
⭐⭐⭐
Serverless Framework configuration for deploying multiple functions
⏱️ 20 min
🏷️ serverless, configuration, deployment, aws
Prerequisites:
AWS account, Serverless Framework CLI
# serverless.yml
service: my-serverless-app
frameworkVersion: '3'

provider:
  name: aws
  runtime: nodejs18.x
  region: us-east-1
  stage: dev
  environment:
    NODE_ENV: ${self:custom.currentStage}
    TABLE_NAME: ${self:service}-${self:custom.currentStage}
  iam:
    role:
      statements:
        - Effect: Allow
          Action:
            - dynamodb:Query
            - dynamodb:Scan
            - dynamodb:GetItem
            - dynamodb:PutItem
            - dynamodb:UpdateItem
            - dynamodb:DeleteItem
          Resource:
            - "arn:aws:dynamodb:${self:provider.region}:*:table/${self:provider.environment.TABLE_NAME}"

functions:
  hello:
    handler: handler.hello
    events:
      - http:
          path: hello
          method: get
          cors: true
  users:
    handler: handler.users
    events:
      - http:
          path: users
          method: get
          cors: true
      - http:
          path: users
          method: post
          cors: true
  getUser:
    handler: handler.getUser
    events:
      - http:
          path: users/{id}
          method: get
          cors: true
  processImage:
    handler: handler.processImage
    events:
      - s3:
          bucket: my-image-bucket
          event: s3:ObjectCreated:*
          existing: true
    timeout: 30
    memorySize: 1024
  scheduledTask:
    handler: handler.scheduledTask
    events:
      - schedule:
          rate: rate(5 minutes)
          enabled: true

resources:
  Resources:
    UsersTable:
      Type: AWS::DynamoDB::Table
      Properties:
        TableName: ${self:provider.environment.TABLE_NAME}
        AttributeDefinitions:
          - AttributeName: id
            AttributeType: S
        KeySchema:
          - AttributeName: id
            KeyType: HASH
        BillingMode: PAY_PER_REQUEST
        PointInTimeRecoverySpecification:
          PointInTimeRecoveryEnabled: true
        SSESpecification:
          SSEEnabled: true
        StreamSpecification:
          StreamViewType: NEW_AND_OLD_IMAGES

plugins:
  - serverless-offline
  - serverless-dotenv-plugin
  - serverless-webpack

custom:
  currentStage: ${opt:stage, 'dev'}
  webpack:
    webpackConfig: 'webpack.config.js'
    includeModules: true
  serverless-offline:
    httpPort: 3000
    babelOptions:
      presets: ["env"]
  dotenv:
    path: ./.env
    include:
      - API_KEY
      - DATABASE_URL

package:
  individually: true
  exclude:
    - .git/**
    - .vscode/**
    - node_modules/**
    - "*.log"
💻 Azure Functions with C# csharp
🟡 intermediate
⭐⭐⭐⭐
Azure Functions examples using C# and .NET
⏱️ 25 min
🏷️ serverless, azure, csharp, dotnet, functions
Prerequisites:
C# and .NET knowledge, Azure subscription
// Azure Functions with C#
using System;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;

namespace MyServerlessApp
{
    public static class HttpFunctions
    {
        [FunctionName("HelloWorld")]
        public static async Task<IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
            ILogger log)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            string name = req.Query["name"];
            string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data = JsonConvert.DeserializeObject(requestBody);
            name = name ?? data?.name;

            return name != null
                ? (ActionResult)new OkObjectResult($"Hello, {name}")
                : new BadRequestObjectResult("Please pass a name on the query string or in the request body");
        }
    }

    public static class QueueFunctions
    {
        [FunctionName("ProcessQueueMessage")]
        public static void Run(
            [QueueTrigger("myqueue-items", Connection = "AzureWebJobsStorage")] string myQueueItem,
            ILogger log)
        {
            log.LogInformation($"C# Queue trigger function processed: {myQueueItem}");

            // Process the queue message
            var message = JsonConvert.DeserializeObject<MessageData>(myQueueItem);

            // Business logic here
            log.LogInformation($"Processing message for user: {message.UserId}");
        }
    }

    public static class TimerFunctions
    {
        [FunctionName("DailyCleanup")]
        public static void Run(
            [TimerTrigger("0 0 3 * * *")] TimerInfo myTimer,
            ILogger log)
        {
            log.LogInformation($"Daily cleanup function executed at: {DateTime.Now}");

            if (myTimer.IsPastDue)
            {
                log.LogInformation("Timer is running late!");
            }

            // Cleanup logic here
            PerformDailyCleanup();
        }

        private static void PerformDailyCleanup()
        {
            // Implement cleanup logic
            // e.g., delete old files, clean up database, etc.
        }
    }

    public static class BlobFunctions
    {
        [FunctionName("ProcessImageUpload")]
        public static void Run(
            [BlobTrigger("images/{name}", Connection = "AzureWebJobsStorage")] Stream myBlob,
            string name,
            ILogger log)
        {
            log.LogInformation($"C# Blob trigger function processed blob\n Name: {name}\n Size: {myBlob.Length} Bytes");

            // Process uploaded image
            ProcessImage(myBlob, name);
        }

        private static void ProcessImage(Stream imageStream, string fileName)
        {
            // Implement image processing logic
            // e.g., resize, add watermark, generate thumbnails
        }
    }

    public static class CosmosDBFunctions
    {
        [FunctionName("CreateUser")]
        public static async Task<IActionResult> CreateUser(
            [HttpTrigger(AuthorizationLevel.Function, "post", Route = "users")] HttpRequest req,
            [CosmosDB(
                databaseName: "MyDatabase",
                collectionName: "Users",
                ConnectionStringSetting = "CosmosDBConnection")] IAsyncCollector<User> usersOut,
            ILogger log)
        {
            log.LogInformation("Creating a new user");

            string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            var user = JsonConvert.DeserializeObject<User>(requestBody);
            user.Id = Guid.NewGuid().ToString();
            user.CreatedAt = DateTime.UtcNow;

            await usersOut.AddAsync(user);

            return new CreatedResult($"/users/{user.Id}", user);
        }

        [FunctionName("GetUser")]
        public static IActionResult GetUser(
            [HttpTrigger(AuthorizationLevel.Function, "get", Route = "users/{id}")] HttpRequest req,
            [CosmosDB(
                databaseName: "MyDatabase",
                collectionName: "Users",
                Id = "{id}",
                PartitionKey = "{id}",
                ConnectionStringSetting = "CosmosDBConnection")] User user,
            ILogger log)
        {
            log.LogInformation($"Getting user with id: {req.RouteValues["id"]}");

            if (user == null)
            {
                return new NotFoundResult();
            }

            return new OkObjectResult(user);
        }
    }

    // Data models
    public class MessageData
    {
        public string UserId { get; set; }
        public string Message { get; set; }
        public DateTime Timestamp { get; set; }
    }

    public class User
    {
        [JsonProperty("id")]
        public string Id { get; set; }

        [JsonProperty("name")]
        public string Name { get; set; }

        [JsonProperty("email")]
        public string Email { get; set; }

        [JsonProperty("createdAt")]
        public DateTime CreatedAt { get; set; }
    }
}
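To call the HelloWorld HTTP trigger once the app is published, the function key can be sent either as a code query parameter or in the x-functions-key header. A small client sketch in Python, assuming the requests package is installed; the app URL and key below are placeholders.

# Call the deployed HelloWorld function; URL and key are placeholders.
import requests

FUNCTION_URL = 'https://my-function-app.azurewebsites.net/api/HelloWorld'
FUNCTION_KEY = '<function-key>'

# The key can also be passed as ?code=<function-key> instead of a header.
resp = requests.post(
    FUNCTION_URL,
    headers={'x-functions-key': FUNCTION_KEY},
    json={'name': 'Azure'},
)
print(resp.status_code, resp.text)  # expected: 200 and "Hello, Azure"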
💻 Google Cloud Functions with Python python
🟡 intermediate
⭐⭐⭐⭐
Google Cloud Functions examples using the Python runtime
⏱️ 25 min
🏷️ serverless, gcp, python, cloud functions
Prerequisites:
Python knowledge, Google Cloud Platform account
# Google Cloud Functions with Python
import functions_framework
from flask import jsonify, request
import json
import logging
import os
from datetime import datetime
from google.cloud import storage
from google.cloud import datastore

# Configure logging
logging.basicConfig(level=logging.INFO)


@functions_framework.http
def hello_world(request):
    """HTTP Cloud Function that returns a JSON greeting."""
    request_json = request.get_json(silent=True)
    request_args = request.args

    if request_json and 'name' in request_json:
        name = request_json['name']
    elif request_args and 'name' in request_args:
        name = request_args['name']
    else:
        name = 'World'

    response_data = {
        'message': f'Hello, {name}!',
        'timestamp': datetime.utcnow().isoformat(),
        'method': request.method,
        'headers': dict(request.headers)
    }

    return jsonify(response_data), 200


@functions_framework.http
def process_data(request):
    """HTTP Cloud Function that processes incoming data."""
    if request.method != 'POST':
        return jsonify({'error': 'Method not allowed'}), 405

    try:
        data = request.get_json()
        if not data:
            return jsonify({'error': 'No data provided'}), 400

        # Validate required fields
        if 'user_id' not in data or 'event_type' not in data:
            return jsonify({'error': 'Missing required fields'}), 400

        # Process the data
        processed_data = {
            'user_id': data['user_id'],
            'event_type': data['event_type'],
            'processed_at': datetime.utcnow().isoformat(),
            'metadata': {
                'source_ip': request.remote_addr,
                'user_agent': request.headers.get('User-Agent', ''),
            }
        }

        # Add additional processing logic here
        if data.get('payload'):
            processed_data['payload_size'] = len(json.dumps(data['payload']))

        # Save to Datastore (optional)
        save_to_datastore(processed_data)

        return jsonify({
            'status': 'success',
            'processed_data': processed_data
        }), 200
    except Exception as e:
        logging.error(f"Error processing data: {str(e)}")
        return jsonify({'error': 'Internal server error'}), 500


@functions_framework.cloud_event
def process_pubsub_message(cloud_event):
    """Background Cloud Function to handle Pub/Sub messages."""
    try:
        # Extract message data
        message_data = cloud_event.data.get('message', {})
        if 'data' in message_data:
            # Decode base64 message
            import base64
            decoded_message = base64.b64decode(message_data['data']).decode('utf-8')
            message_json = json.loads(decoded_message)
        else:
            message_json = message_data

        logging.info(f"Received Pub/Sub message: {message_json}")

        # Process the message
        result = process_message(message_json)
        logging.info(f"Message processed successfully: {result}")
    except Exception as e:
        logging.error(f"Error processing Pub/Sub message: {str(e)}")
        raise


@functions_framework.cloud_event
def process_gcs_event(cloud_event):
    """Background Cloud Function to handle GCS object changes."""
    try:
        # Extract bucket and file information
        bucket = cloud_event.data.get('bucket')
        name = cloud_event.data.get('name')
        generation = cloud_event.data.get('generation')
        event_type = cloud_event.type

        logging.info(f"GCS Event: {event_type} for gs://{bucket}/{name} (generation: {generation})")

        if event_type == 'google.storage.object.finalize':
            # Process new file
            process_uploaded_file(bucket, name)
        elif event_type == 'google.storage.object.delete':
            # Handle file deletion
            logging.info(f"File deleted: gs://{bucket}/{name}")
    except Exception as e:
        logging.error(f"Error processing GCS event: {str(e)}")
        raise


def save_to_datastore(data):
    """Save data to Google Cloud Datastore."""
    try:
        client = datastore.Client()
        kind = 'ProcessedEvent'

        # Create a new entity
        key = client.key(kind)
        entity = datastore.Entity(key=key)

        # Set entity properties
        entity.update(data)

        # Save the entity
        client.put(entity)
        logging.info(f"Data saved to Datastore with key: {entity.key}")
    except Exception as e:
        logging.error(f"Error saving to Datastore: {str(e)}")


def process_message(message):
    """Process incoming Pub/Sub message."""
    # Add your business logic here
    return {
        'processed': True,
        'message_type': message.get('type', 'unknown'),
        'timestamp': datetime.utcnow().isoformat()
    }


def process_uploaded_file(bucket_name, file_name):
    """Process uploaded file in GCS."""
    try:
        client = storage.Client()
        bucket = client.bucket(bucket_name)
        blob = bucket.blob(file_name)

        # Get file metadata
        blob.reload()
        file_size = blob.size
        content_type = blob.content_type

        logging.info(f"Processing file: {file_name} ({file_size} bytes, {content_type})")

        # Add file processing logic here
        if content_type and content_type.startswith('image/'):
            # Process image
            process_image_file(bucket_name, file_name)
        elif content_type and 'csv' in content_type:
            # Process CSV file
            process_csv_file(bucket_name, file_name)
    except Exception as e:
        logging.error(f"Error processing uploaded file: {str(e)}")


def process_image_file(bucket_name, file_name):
    """Process uploaded image file."""
    # Add image processing logic
    logging.info(f"Processing image: {file_name}")
    # e.g., resize, add watermark, generate thumbnails


def process_csv_file(bucket_name, file_name):
    """Process uploaded CSV file."""
    # Add CSV processing logic
    logging.info(f"Processing CSV: {file_name}")
    # e.g., parse data, save to database


# Environment configuration helper
def get_config():
    """Get configuration from environment variables."""
    return {
        'project_id': os.getenv('GCP_PROJECT'),
        'environment': os.getenv('ENVIRONMENT', 'development'),
        'log_level': os.getenv('LOG_LEVEL', 'INFO')
    }
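Because functions_framework hands each HTTP function a Flask request object, hello_world can be exercised locally before deployment, either with functions-framework --target hello_world or directly in a test through a Flask request context. A minimal sketch of the latter, assuming the code above is saved as main.py.

# Local test of hello_world via a Flask test request context (no deploy needed).
# Assumes the functions above live in main.py next to this script.
import flask

from main import hello_world

app = flask.Flask(__name__)

with app.test_request_context('/', method='POST', json={'name': 'Cloud Functions'}):
    body, status = hello_world(flask.request)
    print(status, body.get_json())  # expected: 200 and a greeting for "Cloud Functions"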
⚙️ Serverless API Gateway Integration yaml
🔴 complex
⭐⭐⭐⭐⭐
API Gateway configuration for serverless functions
⏱️ 35 min
🏷️ serverless, api gateway, cloudformation, sam
Prerequisites:
AWS CloudFormation knowledge, API Gateway concepts
# AWS API Gateway with Lambda Integration
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Description: Serverless API with API Gateway

Globals:
  Function:
    Timeout: 30
    MemorySize: 256
    Runtime: python3.9

Parameters:
  Stage:
    Type: String
    Default: dev
    AllowedValues:
      - dev
      - staging
      - prod
    Description: Deployment stage

Resources:
  # API Gateway REST API
  ApiGatewayRestApi:
    Type: AWS::Serverless::Api
    Properties:
      Name: !Sub '${AWS::StackName}-api-${Stage}'
      StageName: !Ref Stage
      DefinitionUri: s3://api-definition-bucket/openapi.yaml
      Variables:
        LambdaFunctionName: !Ref MyApiFunction
      Cors:
        AllowMethods: "'GET,POST,PUT,DELETE,OPTIONS'"
        AllowHeaders: "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'"
        AllowOrigin: "'*'"

  # Lambda Functions
  MyApiFunction:
    Type: AWS::Serverless::Function
    Properties:
      FunctionName: !Sub '${AWS::StackName}-api-${Stage}'
      CodeUri: src/
      Handler: app.lambda_handler
      Runtime: python3.9
      Environment:
        Variables:
          TABLE_NAME: !Ref DataTable
          REGION: !Ref AWS::Region
      Policies:
        - DynamoDBCrudPolicy:
            TableName: !Ref DataTable
      Events:
        ApiEvents:
          Type: Api
          Properties:
            RestApiId: !Ref ApiGatewayRestApi
            Path: /{proxy+}
            Method: ANY

  # DynamoDB Table
  DataTable:
    Type: AWS::DynamoDB::Table
    Properties:
      TableName: !Sub '${AWS::StackName}-data-${Stage}'
      AttributeDefinitions:
        - AttributeName: id
          AttributeType: S
        - AttributeName: type
          AttributeType: S
      KeySchema:
        - AttributeName: id
          KeyType: HASH
        - AttributeName: type
          KeyType: RANGE
      BillingMode: PAY_PER_REQUEST
      StreamSpecification:
        StreamViewType: NEW_AND_OLD_IMAGES
      PointInTimeRecoverySpecification:
        PointInTimeRecoveryEnabled: true

  # Lambda Authorizer
  ApiAuthorizer:
    Type: AWS::Serverless::Function
    Properties:
      FunctionName: !Sub '${AWS::StackName}-authorizer-${Stage}'
      CodeUri: authorizer/
      Handler: authorizer.lambda_handler
      Runtime: python3.9
      Environment:
        Variables:
          JWT_SECRET: !Ref JwtSecret
      Policies:
        - Statement:
            - Effect: Allow
              Action:
                - secretsmanager:GetSecretValue
              Resource: !Sub arn:aws:secretsmanager:${AWS::Region}:${AWS::AccountId}:secret:${JwtSecret}*

  # Custom Domain
  ApiDomainName:
    Type: AWS::ApiGateway::DomainName
    Properties:
      DomainName: !Sub 'api-${Stage}.example.com'
      EndpointConfiguration:
        Types:
          - REGIONAL
      RegionalCertificateArn: !Ref SslCertificate
      SecurityPolicy: TLS_1_2

  # API Gateway Base Path Mapping
  BasePathMapping:
    Type: AWS::ApiGateway::BasePathMapping
    Properties:
      DomainName: !Ref ApiDomainName
      RestApiId: !Ref ApiGatewayRestApi
      Stage: !Ref Stage

  # CloudWatch Log Group
  ApiLogGroup:
    Type: AWS::Logs::LogGroup
    Properties:
      LogGroupName: !Sub '/aws/lambda/${MyApiFunction}'
      RetentionInDays: 30

# Output values
Outputs:
  ApiEndpoint:
    Description: API Gateway endpoint URL
    Value: !Sub 'https://${ApiGatewayRestApi}.execute-api.${AWS::Region}.amazonaws.com/${Stage}'
  ApiFunction:
    Description: Lambda function ARN
    Value: !GetAtt MyApiFunction.Arn
  TableName:
    Description: DynamoDB table name
    Value: !Ref DataTable
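The template wires every method and path (ANY on /{proxy+}) to a single handler at src/app.lambda_handler. A minimal sketch of what that Python handler could look like; the routing and field names below are illustrative, not prescribed by the template.

# src/app.py - minimal handler behind the ANY /{proxy+} route (illustrative).
import json
import os

import boto3

TABLE_NAME = os.environ['TABLE_NAME']
table = boto3.resource('dynamodb').Table(TABLE_NAME)

def lambda_handler(event, context):
    method = event.get('httpMethod')
    path = event.get('path', '/')

    if method == 'GET' and path == '/health':
        return _response(200, {'status': 'ok'})

    if method == 'POST' and path == '/users':
        user = json.loads(event.get('body') or '{}')
        # The table uses a composite key (id HASH, type RANGE), so both must be set.
        user.setdefault('id', context.aws_request_id)
        user.setdefault('type', 'user')
        table.put_item(Item=user)
        return _response(201, user)

    return _response(404, {'error': f'No route for {method} {path}'})

def _response(status, body):
    return {
        'statusCode': status,
        'headers': {'Content-Type': 'application/json'},
        'body': json.dumps(body),
    }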
# OpenAPI Specification for API Gateway
# openapi.yaml
openapi: 3.0.0
info:
  title: Serverless API
  version: 1.0.0
  description: Serverless API with Lambda backend

servers:
  - url: https://{domain}/prod
    variables:
      domain:
        default: api.example.com

paths:
  /health:
    get:
      summary: Health check endpoint
      responses:
        '200':
          description: API is healthy
          content:
            application/json:
              schema:
                type: object
                properties:
                  status:
                    type: string
                  timestamp:
                    type: string

  /users:
    get:
      summary: Get all users
      security:
        - ApiKeyAuth: []
      responses:
        '200':
          description: List of users
    post:
      summary: Create new user
      security:
        - ApiKeyAuth: []
      requestBody:
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/User'
      responses:
        '201':
          description: User created
        '400':
          description: Invalid input

  /users/{userId}:
    get:
      summary: Get user by ID
      security:
        - ApiKeyAuth: []
      parameters:
        - name: userId
          in: path
          required: true
          schema:
            type: string
      responses:
        '200':
          description: User details
        '404':
          description: User not found
    put:
      summary: Update user
      security:
        - ApiKeyAuth: []
      parameters:
        - name: userId
          in: path
          required: true
          schema:
            type: string
      requestBody:
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/User'
      responses:
        '200':
          description: User updated
        '404':
          description: User not found
    delete:
      summary: Delete user
      security:
        - ApiKeyAuth: []
      parameters:
        - name: userId
          in: path
          required: true
          schema:
            type: string
      responses:
        '204':
          description: User deleted
        '404':
          description: User not found

components:
  securitySchemes:
    ApiKeyAuth:
      type: apiKey
      in: header
      name: X-API-Key
    BearerAuth:
      type: http
      scheme: bearer
      bearerFormat: JWT
  schemas:
    User:
      type: object
      required:
        - name
        - email
      properties:
        id:
          type: string
          readOnly: true
        name:
          type: string
        email:
          type: string
          format: email
        role:
          type: string
          enum: [user, admin]
          default: user
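The ApiAuthorizer function referenced in the template (authorizer.lambda_handler) is expected to validate a JWT and answer with an IAM policy. A sketch of such a token authorizer, assuming PyJWT is packaged with the function; for brevity it reads the secret straight from the JWT_SECRET variable rather than resolving it through Secrets Manager.

# authorizer/authorizer.py - token authorizer returning an IAM policy (sketch).
# Assumes PyJWT is bundled with the deployment package; claim names are illustrative.
import os

import jwt  # PyJWT

def lambda_handler(event, context):
    token = (event.get('authorizationToken') or '').removeprefix('Bearer ').strip()
    try:
        claims = jwt.decode(token, os.environ['JWT_SECRET'], algorithms=['HS256'])
        effect, principal = 'Allow', claims.get('sub', 'user')
    except jwt.PyJWTError:
        effect, principal = 'Deny', 'anonymous'

    return {
        'principalId': principal,
        'policyDocument': {
            'Version': '2012-10-17',
            'Statement': [{
                'Action': 'execute-api:Invoke',
                'Effect': effect,
                'Resource': event['methodArn'],
            }],
        },
    }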