OpenTelemetry Examples

OpenTelemetry observability examples covering instrumentation, tracing, metrics, and logging across multiple languages and frameworks

💻 OpenTelemetry Node.js Instrumentation javascript

🟡 intermediate ⭐⭐⭐⭐

Complete OpenTelemetry configuration for Node.js applications, including automatic and manual instrumentation

⏱️ 45 min 🏷️ opentelemetry, nodejs, observability, tracing
Prerequisites: Node.js, Express, Docker, Monitoring concepts
// OpenTelemetry Node.js Complete Setup
// Comprehensive instrumentation for Node.js applications

// 1. package.json dependencies
{
  "dependencies": {
    "@opentelemetry/api": "^1.7.0",
    "@opentelemetry/sdk-node": "^0.41.0",
    "@opentelemetry/auto-instrumentations": "^0.39.0",
    "@opentelemetry/exporter-jaeger": "^1.17.0",
    "@opentelemetry/exporter-prometheus": "^0.41.0",
    "@opentelemetry/exporter-otlp-grpc": "^0.41.0",
    "@opentelemetry/exporter-otlp-http": "^0.41.0",
    "@opentelemetry/instrumentation-express": "^0.31.0",
    "@opentelemetry/instrumentation-http": "^0.41.0",
    "@opentelemetry/instrumentation-mongodb": "^0.36.0",
    "@opentelemetry/instrumentation-redis": "^0.35.0",
    "@opentelemetry/instrumentation-pg": "^0.36.0",
    "@opentelemetry/semantic-conventions": "^1.17.0"
  }
}

// 2. otel-setup.js - Main instrumentation setup
const { NodeSDK } = require('@opentelemetry/sdk-node');
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node');
const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-http');
const { PrometheusExporter } = require('@opentelemetry/exporter-prometheus');
const { TraceIdRatioBasedSampler } = require('@opentelemetry/sdk-trace-base');
const { Resource } = require('@opentelemetry/resources');
const { SemanticResourceAttributes } = require('@opentelemetry/semantic-conventions');
const { trace, context, SpanKind } = require('@opentelemetry/api');

// Configure service resource
const resource = new Resource({
  [SemanticResourceAttributes.SERVICE_NAME]: 'my-nodejs-app',
  [SemanticResourceAttributes.SERVICE_VERSION]: '1.0.0',
  [SemanticResourceAttributes.SERVICE_INSTANCE_ID]: process.env.HOSTNAME || 'unknown',
  [SemanticResourceAttributes.DEPLOYMENT_ENVIRONMENT]: process.env.NODE_ENV || 'development',
  [SemanticResourceAttributes.HOST_NAME]: process.env.HOSTNAME,
  [SemanticResourceAttributes.PROCESS_PID]: process.pid,
});

// Initialize OpenTelemetry SDK
const sdk = new NodeSDK({
  resource,
  instrumentations: [getNodeAutoInstrumentations()],
  traceExporter: new OTLPTraceExporter({
    url: process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT || 'http://localhost:4318/v1/traces',
    headers: {
      'api-key': process.env.OTEL_API_KEY,
    },
  }),
  // PrometheusExporter is a MetricReader; it serves /metrics on port 9464
  metricReader: new PrometheusExporter({
    port: 9464,
    endpoint: '/metrics',
  }),
  // Sample a configurable ratio of traces (1.0 = keep everything)
  sampler: new TraceIdRatioBasedSampler(
    parseFloat(process.env.OTEL_TRACES_SAMPLER_ARG || '1.0')
  ),
  spanLimits: {
    attributeCountLimit: 100,
    eventCountLimit: 1000,
    linkCountLimit: 100,
  },
});

sdk.start();

console.log('OpenTelemetry instrumentation started');
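// Flush pending telemetry and stop the SDK on shutdown.
// A minimal sketch; which signals to handle is an application choice.
process.on('SIGTERM', () => {
  sdk.shutdown()
    .then(() => console.log('OpenTelemetry SDK shut down'))
    .catch((err) => console.error('Error shutting down OpenTelemetry SDK', err))
    .finally(() => process.exit(0));
});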

// 3. Express application with OpenTelemetry
const express = require('express');
const tracer = trace.getTracer('express-app');
const { SpanStatusCode } = require('@opentelemetry/api');

const app = express();
app.use(express.json());

// Custom middleware for OpenTelemetry tracing
function telemetryMiddleware(req, res, next) {
  const tracer = trace.getTracer('express-middleware');
  const span = tracer.startSpan('http-request', {
    kind: SpanKind.SERVER,
    attributes: {
      'http.method': req.method,
      'http.url': req.url,
      'http.target': req.path,
      'user_agent': req.get('user-agent'),
      'http.remote_addr': req.ip,
    },
  });

  // Make this span the active span for downstream handlers
  context.with(trace.setSpan(context.active(), span), () => {
    // Add response listener
    res.on('finish', () => {
      span.setAttributes({
        'http.status_code': res.statusCode,
        'http.response_content_length': res.get('content-length'),
      });

      if (res.statusCode >= 400) {
        span.setStatus({
          code: SpanStatusCode.ERROR,
          message: `HTTP ${res.statusCode}`,
        });
      }

      span.end();
    });

    next();
  });
}

app.use(telemetryMiddleware);

// Manual tracing for custom business logic
async function processUserOrder(userId, orderId) {
  const tracer = trace.getTracer('order-service');
  const span = tracer.startSpan('process-order', {
    attributes: {
      'user.id': userId,
      'order.id': orderId,
    },
  });

  try {
    // Database operation span
    const dbSpan = tracer.startSpan('database-query', {
      attributes: {
        'db.system': 'postgresql',
        'db.operation': 'SELECT',
      },
    });

    // Simulate database query
    await new Promise(resolve => setTimeout(resolve, 100));

    const order = { id: orderId, userId, total: 100 };

    dbSpan.setAttributes({
      'db.rows_returned': 1,
    });
    dbSpan.end();

    span.setAttributes({
      'order.total': order.total,
      'order.currency': 'USD',
    });

    return order;
  } catch (error) {
    span.recordException(error);
    span.setStatus({
      code: SpanStatusCode.ERROR,
      message: error.message,
    });
    throw error;
  } finally {
    span.end();
  }
}

// API routes
app.get('/users/:userId/orders/:orderId', async (req, res) => {
  const tracer = trace.getTracer('api');
  const span = tracer.startSpan('get-user-order', {
    kind: SpanKind.SERVER,
    attributes: {
      'user.id': req.params.userId,
      'order.id': req.params.orderId,
    },
  });

  try {
    const order = await processUserOrder(req.params.userId, req.params.orderId);
    res.json({ success: true, order });
  } catch (error) {
    span.recordException(error);
    res.status(500).json({ error: error.message });
  } finally {
    span.end();
  }
});

// 4. Custom metrics collection
const { metrics } = require('@opentelemetry/api');

// Custom metrics (the SDK initialized in otel-setup.js registers the global MeterProvider)
const meter = metrics.getMeter('application-metrics');

// Counter for business metrics
const orderCounter = meter.createCounter('orders_processed', {
  description: 'Number of orders processed',
  unit: '1',
});

// Histogram for response times
const responseTimeHistogram = meter.createHistogram('http_request_duration', {
  description: 'HTTP request duration',
  unit: 'ms',
  advice: {
    explicitBucketBoundaries: [10, 50, 100, 200, 500, 1000, 2000, 5000],
  },
});

// Gauge for system resources
const activeConnectionsGauge = meter.createUpDownCounter('active_connections', {
  description: 'Number of active connections',
  unit: '1',
});

// Example usage
function trackOrderProcessing(orderId, amount) {
  orderCounter.add(1, {
    'order.id': orderId,
    'order.amount': amount.toString(),
  });
}

function trackResponseTime(duration, route, method) {
  responseTimeHistogram.record(duration, {
    'http.route': route,
    'http.method': method,
  });
}
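// The helpers above are not wired into the Express app; one possible hook,
// sketched here by reusing the middleware pattern from section 3:
app.use((req, res, next) => {
  const start = Date.now();
  res.on('finish', () => {
    // Record the duration in ms against the matched route (falls back to the raw path)
    trackResponseTime(Date.now() - start, req.route?.path || req.path, req.method);
  });
  next();
});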

// 5. Database instrumentation
const { registerInstrumentations } = require('@opentelemetry/instrumentation');
const { PgInstrumentation } = require('@opentelemetry/instrumentation-pg');
const { Pool } = require('pg');

// Register PostgreSQL instrumentation against the global providers set up by the SDK
// (redundant if getNodeAutoInstrumentations() already enables pg; shown here for completeness)
registerInstrumentations({
  instrumentations: [new PgInstrumentation()],
});

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
});

async function queryDatabase(sql, params = []) {
  const tracer = trace.getTracer('database');
  const span = tracer.startSpan('database-query', {
    attributes: {
      'db.system': 'postgresql',
      'db.statement': sql,
    },
  });

  try {
    const start = Date.now();
    const result = await pool.query(sql, params);
    const duration = Date.now() - start;

    span.setAttributes({
      'db.query.duration_ms': duration,
      'db.rows_affected': result.rowCount || result.rows?.length || 0,
    });

    return result;
  } catch (error) {
    span.recordException(error);
    throw error;
  } finally {
    span.end();
  }
}

// 6. Error handling and logging with correlation
const winston = require('winston');

const logger = winston.createLogger({
  level: 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.json(),
    winston.format.printf(({ timestamp, level, message, traceId, spanId, ...meta }) => {
      return JSON.stringify({
        timestamp,
        level,
        message,
        traceId,
        spanId,
        ...meta,
      });
    })
  ),
  transports: [
    new winston.transports.Console(),
  ],
});

// Logger middleware that adds trace context
function loggingMiddleware(req, res, next) {
  const activeSpan = trace.getActiveSpan();
  const traceId = activeSpan?.spanContext().traceId;
  const spanId = activeSpan?.spanContext().spanId;

  // Add trace context to request
  req.traceId = traceId;
  req.spanId = spanId;

  // Log the request
  logger.info('HTTP Request', {
    method: req.method,
    url: req.url,
    userAgent: req.get('user-agent'),
    traceId,
    spanId,
  });

  // Override res.end to log response
  const originalEnd = res.end;
  res.end = function(...args) {
    logger.info('HTTP Response', {
      statusCode: res.statusCode,
      traceId,
      spanId,
    });
    originalEnd.apply(this, args);
  };

  next();
}

app.use(loggingMiddleware);

// 7. Environment configuration
// .env file
OTEL_SERVICE_NAME=my-nodejs-app
OTEL_SERVICE_VERSION=1.0.0
OTEL_RESOURCE_ATTRIBUTES=service.namespace=production,service.instance.id=instance-1
OTEL_TRACES_EXPORTER=otlp
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4318/v1/traces
OTEL_METRICS_EXPORTER=prometheus
OTEL_LOGS_EXPORTER=console
OTEL_TRACES_SAMPLER=traceidratio
OTEL_TRACES_SAMPLER_ARG=1.0
OTEL_NODE_RESOURCE_DETECTORS=env,host,os
OTEL_EXPORTER_JAEGER_AGENT_HOST=localhost
OTEL_EXPORTER_JAEGER_AGENT_PORT=6831
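// The setup file has to run before application modules are loaded so that
// auto-instrumentation can patch them on require. Assuming the file name used
// above ("server.js" is a placeholder for your entry point):
//   node --require ./otel-setup.js server.js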

// 8. Docker Compose for local development
// docker-compose.yml
version: '3.8'

services:
  app:
    build: .
    ports:
      - "3000:3000"
    environment:
      - NODE_ENV=development
      - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://otel-collector:4318/v1/traces
      - OTEL_SERVICE_NAME=my-nodejs-app
      - DATABASE_URL=postgresql://postgres:password@postgres:5432/myapp
    depends_on:
      - otel-collector
      - postgres
    volumes:
      - .:/app
      - /app/node_modules

  otel-collector:
    image: otel/opentelemetry-collector-contrib:latest
    command:
      - --config=/etc/otel-collector-config.yaml
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
    ports:
      - "4317:4317"   # OTLP gRPC receiver
      - "4318:4318"   # OTLP HTTP receiver
      - "8888:8888"   # Prometheus metrics
      - "8889:8889"   # Prometheus exporter
      - "13133:13133" # health check extension
      - "55679:55679" # zpages extension
    depends_on:
      - jaeger
      - prometheus

  jaeger:
    image: jaegertracing/all-in-one:latest
    ports:
      - "16686:16686"
      - "14268:14268"
    environment:
      - COLLECTOR_OTLP_ENABLED=true

  prometheus:
    image: prom/prometheus:latest
    ports:
      - "9090:9090"
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml

  postgres:
    image: postgres:15
    environment:
      - POSTGRES_DB=myapp
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=password
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  postgres_data:

// 9. Collector configuration
// otel-collector-config.yaml
receivers:
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318

  prometheus:
    config:
      scrape_configs:
        - job_name: 'otel-collector'
          static_configs:
            - targets: ['localhost:8888', 'app:9464']

processors:
  batch:
  memory_limiter:
    check_interval: 1s
    limit_mib: 512

extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  zpages:
    endpoint: 0.0.0.0:55679

exporters:
  otlphttp:
    endpoint: "https://your-otel-endpoint.com/v1/traces"
    headers:
      "Authorization": "Bearer YOUR_TOKEN"
  prometheus:
    endpoint: "0.0.0.0:8889"
  # The deprecated `logging` exporter was replaced by `debug` in recent collector releases
  debug:
    verbosity: normal

service:
  extensions: [health_check, zpages]
  pipelines:
    traces:
      receivers: [otlp]
      processors: [memory_limiter, batch]
      exporters: [otlphttp, debug]
    metrics:
      receivers: [otlp, prometheus]
      processors: [memory_limiter, batch]
      exporters: [prometheus, debug]
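// The Compose file above mounts a prometheus.yml that is not shown; a minimal
// sketch scraping the collector's own metrics (8888) and its Prometheus
// exporter (8889); target names assume the Compose service names above
// prometheus.yml
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'otel-collector'
    static_configs:
      - targets: ['otel-collector:8888', 'otel-collector:8889']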

// 10. Testing with OpenTelemetry
const { NodeSDK } = require('@opentelemetry/sdk-node');
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node');
const { InMemorySpanExporter, SimpleSpanProcessor } = require('@opentelemetry/sdk-trace-base');

// Setup for testing
function setupTestTracing() {
  const memoryExporter = new InMemorySpanExporter();
  const testSdk = new NodeSDK({
    instrumentations: [getNodeAutoInstrumentations()],
    spanProcessor: new SimpleSpanProcessor(memoryExporter),
  });

  testSdk.start();

  return { testSdk, memoryExporter };
}

// Example test
const { expect } = require('chai');

describe('OpenTelemetry Tracing', () => {
  let sdk, exporter;

  beforeEach(() => {
    ({ testSdk: sdk, memoryExporter: exporter } = setupTestTracing());
  });

  afterEach(async () => {
    await sdk.shutdown();
  });

  it('should trace HTTP requests', async () => {
    const response = await fetch('http://localhost:3000/users/123/orders/456');
    expect(response.status).to.equal(200);

    const spans = exporter.getFinishedSpans();
    expect(spans.length).to.be.greaterThan(0);

    // The server span name depends on the http instrumentation version (e.g. 'GET' or 'HTTP GET')
    const httpSpan = spans.find(span => span.name.includes('GET'));
    expect(httpSpan).to.exist;
    expect(httpSpan.attributes['http.status_code']).to.equal(200);
  });

  it('should include trace context in logs', async () => {
    const response = await fetch('http://localhost:3000/users/123/orders/456');
    expect(response.status).to.equal(200);

    const spans = exporter.getFinishedSpans();
    const span = spans[0];

    expect(span.spanContext().traceId).to.be.a('string');
    expect(span.spanContext().spanId).to.be.a('string');
  });
});

💻 OpenTelemetry Python Instrumentation python

🟡 intermediate ⭐⭐⭐⭐

Complete OpenTelemetry configuration for Python applications, including Flask, Django, and asynchronous frameworks

⏱️ 45 min 🏷️ opentelemetry, python, observability, flask
Prerequisites: Python, Flask, Async programming, Monitoring concepts
# OpenTelemetry Python Complete Setup
# Comprehensive instrumentation for Python applications

# 1. requirements.txt
opentelemetry-api==1.21.0
opentelemetry-sdk==1.21.0
opentelemetry-instrumentation==0.42b0
opentelemetry-instrumentation-flask==0.42b0
opentelemetry-instrumentation-django==0.42b0
opentelemetry-instrumentation-requests==0.42b0
opentelemetry-instrumentation-asyncpg==0.42b0
opentelemetry-instrumentation-psycopg2==0.42b0
opentelemetry-instrumentation-redis==0.42b0
opentelemetry-instrumentation-mysql==0.42b0
opentelemetry-instrumentation-aiohttp-client==0.42b0
opentelemetry-exporter-jaeger==1.21.0
opentelemetry-exporter-prometheus==0.42b0
opentelemetry-exporter-otlp==1.21.0
opentelemetry-propagator-b3==1.21.0
opentelemetry-propagator-jaeger==1.21.0

# 2. otel_setup.py - Main instrumentation setup
import os
from opentelemetry import trace, metrics
from opentelemetry import _logs as logs  # the logs API is still experimental
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.exporter.jaeger.thrift import JaegerExporter
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.flask import FlaskInstrumentor
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry import context
from opentelemetry.propagate import inject, extract
import logging

def setup_opentelemetry():
    """Setup OpenTelemetry with all necessary components"""

    # Configure service resource
    resource = Resource.create({
        "service.name": os.getenv("OTEL_SERVICE_NAME", "python-app"),
        "service.version": os.getenv("OTEL_SERVICE_VERSION", "1.0.0"),
        "service.instance.id": os.getenv("HOSTNAME", "localhost"),
        "deployment.environment": os.getenv("OTEL_ENVIRONMENT", "development"),
        "host.name": os.getenv("HOSTNAME", "localhost"),
        "process.pid": os.getpid(),
    })

    # Setup Tracing
    tracer_provider = TracerProvider(resource=resource)

    # Add Jaeger exporter
    jaeger_exporter = JaegerExporter(
        agent_host_name=os.getenv("JAEGER_AGENT_HOST", "localhost"),
        agent_port=int(os.getenv("JAEGER_AGENT_PORT", "6831")),
    )

    # Add OTLP exporter
    otlp_exporter = OTLPSpanExporter(
        endpoint=os.getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT", "http://localhost:4318/v1/traces"),
        headers={
            "api-key": os.getenv("OTEL_API_KEY", ""),
        }
    )

    tracer_provider.add_span_processor(
        BatchSpanProcessor(jaeger_exporter)
    )
    tracer_provider.add_span_processor(
        BatchSpanProcessor(otlp_exporter)
    )

    trace.set_tracer_provider(tracer_provider)

    # Setup Metrics (metric readers are passed to the provider at construction;
    # the Prometheus reader publishes to the prometheus_client registry)
    metric_reader = PrometheusMetricReader()

    meter_provider = MeterProvider(resource=resource, metric_readers=[metric_reader])
    metrics.set_meter_provider(meter_provider)

    # Setup Logging
    logger_provider = LoggerProvider(resource=resource)
    logs.set_logger_provider(logger_provider)

    # Configure Python logging to use OpenTelemetry
    logging_handler = LoggingHandler(level=logging.NOTSET, logger_provider=logger_provider)
    logging.getLogger().addHandler(logging_handler)

    # Library instrumentation (Flask apps created after this call and outgoing
    # `requests` calls are traced automatically)
    FlaskInstrumentor().instrument()
    RequestsInstrumentor().instrument()

    return tracer_provider, meter_provider, logger_provider

# Initialize OpenTelemetry
if __name__ == "__main__":
    setup_opentelemetry()

# 3. Flask application with OpenTelemetry
from flask import Flask, request, jsonify
from opentelemetry import trace, metrics
from opentelemetry.trace import SpanKind, StatusCode
from opentelemetry.propagate import inject
import time
import requests

app = Flask(__name__)

# Get tracer
tracer = trace.get_tracer(__name__)

# Custom middleware for tracing
@app.before_request
def before_request():
    """Extract trace context from incoming request"""
    span = trace.get_current_span()
    span.set_attribute("http.method", request.method)
    span.set_attribute("http.url", request.url)
    span.set_attribute("http.target", request.path)
    span.set_attribute("http.user_agent", request.headers.get("User-Agent", ""))
    span.set_attribute("http.remote_addr", request.remote_addr)

@app.after_request
def after_request(response):
    """Add response attributes to span"""
    span = trace.get_current_span()
    span.set_attribute("http.status_code", response.status_code)

    if response.status_code >= 400:
        span.set_status(
            StatusCode.ERROR,
            f"HTTP {response.status_code}"
        )

    return response

# Manual tracing for business logic
def process_user_payment(user_id, amount, payment_method):
    """Process user payment with tracing"""
    with tracer.start_as_current_span(
        "process-payment",
        kind=trace.SpanKind.INTERNAL
    ) as span:
        span.set_attributes({
            "user.id": str(user_id),
            "payment.amount": str(amount),
            "payment.method": payment_method,
        })

        try:
            # Simulate payment processing
            with tracer.start_as_current_span("payment-gateway-call"):
                time.sleep(0.1)  # Simulate network call
                payment_id = f"pay_{int(time.time())}"

                span.set_attribute("payment.id", payment_id)
                span.set_attribute("payment.status", "success")

                return {
                    "payment_id": payment_id,
                    "status": "success",
                    "amount": amount
                }

        except Exception as e:
            span.record_exception(e)
            span.set_status(StatusCode.ERROR, str(e))
            raise

# API routes
@app.route("/api/users/<int:user_id>/payments", methods=["POST"])
def create_payment(user_id):
    """Create payment for user"""
    with tracer.start_as_current_span(
        "create-payment-endpoint",
        kind=trace.SpanKind.SERVER
    ) as span:
        try:
            data = request.get_json()
            amount = data.get("amount")
            payment_method = data.get("payment_method")

            if not amount or not payment_method:
                return jsonify({"error": "Missing required fields"}), 400

            result = process_user_payment(user_id, amount, payment_method)

            # Record a business metric (in a real app, create the counter once
            # at module load rather than on every request)
            meter = metrics.get_meter(__name__)
            payment_counter = meter.create_counter("payments_created")
            payment_counter.add(1, {
                "payment.method": payment_method,
                "user.id": str(user_id),
            })

            return jsonify(result)

        except Exception as e:
            return jsonify({"error": str(e)}), 500

# Propagate context to external services
@app.route("/api/external-service")
def call_external_service():
    """Example of propagating trace context to external service"""
    with tracer.start_as_current_span("external-service-call"):
        headers = {}
        inject(headers)  # Inject trace context into headers

        response = requests.get(
            "https://api.example.com/data",
            headers=headers
        )

        return jsonify({
            "status": response.status_code,
            "data": response.json() if response.headers.get("content-type", "").startswith("application/json") else None
        })
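# With the default W3C TraceContext propagator, inject() adds a "traceparent"
# header of the form "00-<trace-id>-<span-id>-<flags>", which the downstream
# service can read with extract() to continue the same trace.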

# 4. Custom metrics collection
from opentelemetry.metrics import Observation

def setup_custom_metrics():
    """Setup custom application metrics"""
    meter = metrics.get_meter("application-metrics")

    # Counter for business metrics
    order_counter = meter.create_counter(
        "orders_processed",
        description="Number of orders processed"
    )

    # Histogram for response times
    response_time_histogram = meter.create_histogram(
        "http_request_duration_seconds",
        description="HTTP request duration in seconds"
    )

    # Gauge for active users
    active_users_gauge = meter.create_up_down_counter(
        "active_users",
        description="Number of active users"
    )

    return {
        "order_counter": order_counter,
        "response_time_histogram": response_time_histogram,
        "active_users_gauge": active_users_gauge
    }

# Initialize metrics
metrics_data = setup_custom_metrics()
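# Example usage of the instruments above (attribute names and values are illustrative)
def track_request(route, method, duration_seconds):
    metrics_data["response_time_histogram"].record(
        duration_seconds,
        {"http.route": route, "http.method": method},
    )
    metrics_data["active_users_gauge"].add(1, {"http.route": route})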

# 5. Async application with OpenTelemetry
import asyncio
import aiohttp
from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor

# Instrument aiohttp client
AioHttpClientInstrumentor().instrument()

async def async_request_example():
    """Example of async request with tracing"""
    tracer = trace.get_tracer(__name__)

    with tracer.start_as_current_span("async-http-request"):
        async with aiohttp.ClientSession() as session:
            headers = {}
            inject(headers)  # Inject trace context

            async with session.get(
                "https://api.example.com/data",
                headers=headers
            ) as response:
                data = await response.json()
                return data

# 6. Database instrumentation
import asyncpg
from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor

# Instrument asyncpg
AsyncPGInstrumentor().instrument()

async def database_example():
    """Example of database operation with tracing"""
    tracer = trace.get_tracer(__name__)

    with tracer.start_as_current_span("database-operation") as span:
        span.set_attributes({
            "db.system": "postgresql",
            "db.operation": "SELECT",
            "db.name": "myapp"
        })

        conn = await asyncpg.connect("postgresql://user:pass@localhost/myapp")

        try:
            result = await conn.fetch(
                "SELECT * FROM users WHERE id = $1",
                123
            )

            span.set_attributes({
                "db.rows_returned": len(result)
            })

            return result

        finally:
            await conn.close()

# 7. Background task instrumentation
import threading
from concurrent.futures import ThreadPoolExecutor

def background_task_with_tracing(task_id, data):
    """Background task with manual tracing"""
    tracer = trace.get_tracer(__name__)

    # Create new span for background task
    with tracer.start_as_current_span(
        f"background-task-{task_id}",
        kind=trace.SpanKind.INTERNAL
    ) as span:
        span.set_attribute("task.id", str(task_id))
        span.set_attribute("task.data_size", len(str(data)))

        # Simulate work
        time.sleep(0.5)

        # Record some metrics
        metrics_data["order_counter"].add(1, {
            "task.type": "background",
            "task.status": "completed"
        })

        return f"Task {task_id} completed"

@app.route("/api/tasks/<int:task_id>", methods=["POST"])
def create_background_task(task_id):
    """Create and run background task"""
    data = request.get_json() or {}

    # Use thread pool for background tasks
    with ThreadPoolExecutor(max_workers=4) as executor:
        future = executor.submit(
            background_task_with_tracing,
            task_id,
            data
        )

        # In a real app, you'd store the future and check status later
        result = future.result(timeout=10)

        return jsonify({"result": result})

# 8. Environment configuration
# .env
OTEL_SERVICE_NAME=python-app
OTEL_SERVICE_VERSION=1.0.0
OTEL_RESOURCE_ATTRIBUTES=service.namespace=production,service.instance.id=instance-1
OTEL_TRACES_EXPORTER=jaeger,otlp
OTEL_EXPORTER_JAEGER_AGENT_HOST=localhost
OTEL_EXPORTER_JAEGER_AGENT_PORT=6831
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://localhost:4318/v1/traces
OTEL_METRICS_EXPORTER=prometheus
OTEL_LOGS_EXPORTER=console
OTEL_PYTHON_LOG_CORRELATION=true
OTEL_LOG_LEVEL=INFO

# 9. Dockerfile
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Expose port
EXPOSE 5000

# Run the application
CMD ["python", "app.py"]

# 10. Testing with OpenTelemetry
import unittest
from opentelemetry import trace
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
from opentelemetry.sdk.trace import TracerProvider

class TestTracing(unittest.TestCase):

    def setUp(self):
        """Setup test tracer"""
        self.memory_exporter = InMemorySpanExporter()

        tracer_provider = TracerProvider()
        tracer_provider.add_span_processor(
            SimpleSpanProcessor(self.memory_exporter)
        )

        trace.set_tracer_provider(tracer_provider)

    def test_manual_tracing(self):
        """Test manual span creation"""
        tracer = trace.get_tracer(__name__)

        with tracer.start_as_current_span("test-span") as span:
            span.set_attribute("test.attribute", "test-value")

        spans = self.memory_exporter.get_finished_spans()
        self.assertEqual(len(spans), 1)
        self.assertEqual(spans[0].name, "test-span")
        self.assertEqual(spans[0].attributes["test.attribute"], "test-value")

    def test_context_propagation(self):
        """Test trace context propagation"""
        tracer = trace.get_tracer(__name__)

        with tracer.start_as_current_span("parent-span"):
            with tracer.start_as_current_span("child-span") as child_span:
                pass

        spans = self.memory_exporter.get_finished_spans()
        self.assertEqual(len(spans), 2)

        # Spans are exported as they end, so the child comes first; both must
        # share the same trace ID
        child_trace_id = spans[0].get_span_context().trace_id
        parent_trace_id = spans[1].get_span_context().trace_id
        self.assertEqual(parent_trace_id, child_trace_id)

if __name__ == "__main__":
    setup_opentelemetry()
    app.run(debug=True, port=5000)