ELK Stack Log Analysis Samples

Comprehensive ELK Stack (Elasticsearch, Logstash, Kibana) samples for log aggregation, processing, and visualization in distributed systems

📝 Application Log Aggregation Format

🟢 simple

Structured JSON log format for application logs with correlation IDs and metadata for distributed tracing

⏱️ 5 min 🏷️ log-format, json, structured-logging, correlation-id, metadata
{
  "@timestamp": "2025-12-07T10:30:45.123Z",
  "level": "INFO",
  "message": "User authentication successful",
  "service": "auth-service",
  "version": "2.1.3",
  "trace_id": "550e8400-e29b-41d4-a716-446655440000",
  "span_id": "550e8400-e29b-41d4-a716-446655440001",
  "parent_span_id": "550e8400-e29b-41d4-a716-446655440000",
  "user_id": "user_12345",
  "request_id": "req_67890",
  "session_id": "sess_abcde",
  "correlation_id": "corr_fghij",

  "context": {
    "host": "auth-server-01",
    "pod": "auth-service-7f8d9c2b-k4l5m",
    "namespace": "production",
    "environment": "prod",
    "region": "us-west-2"
  },

  "request": {
    "method": "POST",
    "path": "/api/v1/auth/login",
    "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
    "client_ip": "192.168.1.100",
    "response_time": 145.67,
    "status_code": 200
  },

  "user": {
    "id": "user_12345",
    "email": "[email protected]",
    "roles": ["user", "premium"],
    "tenant": "company_xyz"
  },

  "security": {
    "auth_method": "jwt",
    "mfa_enabled": true,
    "login_attempts": 2,
    "risk_score": "low"
  },

  "performance": {
    "cpu_usage": 12.5,
    "memory_usage": 67.8,
    "disk_io": 2.1,
    "network_io": 15.3
  },

  "tags": ["authentication", "successful", "mfa"],
  "metadata": {
    "source": "application",
    "format_version": "1.0",
    "encoding": "utf-8"
  }
}
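
A minimal sketch of emitting logs in this shape from application code, here with Python's standard library (the formatter class and the hard-coded service name are illustrative assumptions, not a prescribed implementation):

import json
import logging
import sys
import uuid
from datetime import datetime, timezone

class JsonLineFormatter(logging.Formatter):
    """Render each log record as one JSON object per line."""
    def format(self, record):
        doc = {
            "@timestamp": datetime.now(timezone.utc)
                .isoformat(timespec="milliseconds")
                .replace("+00:00", "Z"),
            "level": record.levelname,
            "message": record.getMessage(),
            "service": "auth-service",  # assumed service name
            # Fall back to a fresh ID when the caller passes no trace context
            "trace_id": getattr(record, "trace_id", str(uuid.uuid4())),
        }
        return json.dumps(doc)

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(JsonLineFormatter())
logger = logging.getLogger("auth-service")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info(
    "User authentication successful",
    extra={"trace_id": "550e8400-e29b-41d4-a716-446655440000"},
)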

📝 Elasticsearch Index Mapping Configuration

🔴 complex ⭐⭐⭐

Advanced index mapping with custom analyzers, field types, and optimization for log data

⏱️ 15 min 🏷️ elasticsearch, index, mapping, schema, analyzers, optimization
{
  "mappings": {
    "properties": {
      "@timestamp": { "type": "date" },
      "level": {
        "type": "keyword",
        "fields": {
          "text": { "type": "text" }
        }
      },
      "message": {
        "type": "text",
        "analyzer": "standard",
        "fields": {
          "keyword": { "type": "keyword" }
        }
      },
      "service": { "type": "keyword" },
      "trace_id": { "type": "keyword" },
      "span_id": { "type": "keyword" },
      "user_id": { "type": "keyword" },
      "request_id": { "type": "keyword" },
      "response_time": { "type": "float" },
      "status_code": { "type": "integer" },
      "error": {
        "type": "object",
        "properties": {
          "type": { "type": "keyword" },
          "message": { "type": "text" },
          "stack_trace": { "type": "text" }
        }
      },
      "tags": { "type": "keyword" },
      "host": {
        "properties": {
          "name": { "type": "keyword" },
          "ip": { "type": "ip" },
          "os": { "type": "keyword" }
        }
      },
      "geoip": {
        "properties": {
          "location": { "type": "geo_point" },
          "country_name": { "type": "keyword" },
          "city_name": { "type": "keyword" }
        }
      }
    }
  },
  "settings": {
    "analysis": {
      "analyzer": {
        "log_analyzer": {
          "type": "custom",
          "tokenizer": "standard",
          "filter": ["lowercase", "stop"]
        }
      }
    },
    "number_of_shards": 3,
    "number_of_replicas": 1
  }
}
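
A hedged sketch of applying this mapping at index-creation time with the official Python client, elasticsearch-py 8.x (the cluster URL, index name, and the file holding the JSON above are assumptions):

import json
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

# The JSON document above, saved to disk beforehand (filename is illustrative)
with open("logs-mapping.json") as f:
    index_body = json.load(f)

# The 8.x client takes mappings and settings as separate keyword arguments
es.indices.create(
    index="logs-2025.12.07",
    mappings=index_body["mappings"],
    settings=index_body["settings"],
)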

📝 Logstash Data Processing Pipeline

🔴 complex ⭐⭐⭐⭐

Complete Logstash pipeline configuration for parsing, filtering, and enriching log data from multiple sources

⏱️ 25 min 🏷️ logstash, pipeline, grok, filter, parse, enrich, transform
input {
  beats {
    port => 5044
  }
  tcp {
    port => 5000
    codec => json_lines
  }
  file {
    path => "/var/log/*.log"
    start_position => "beginning"
  }
}

filter {
  # Parse JSON logs
  if [message] =~ /^{.*}$/ {
    json {
      source => "message"
      target => "parsed"
    }
  }

  # Add GeoIP information
  if [client_ip] {
    geoip {
      source => "client_ip"
      target => "geoip"
    }
  }

  # Parse user agent
  if [user_agent] {
    useragent {
      source => "user_agent"
      target => "ua"
    }
  }

  # Extract fields from plain-text log messages
  grok {
    match => {
      "message" => "%{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:level} \[%{DATA:thread}\] %{DATA:logger} - %{GREEDYDATA:log_message}"
    }
    tag_on_failure => ["_grokparsefailure"]
  }

  # Parse the grok-extracted timestamp into @timestamp (must run after grok)
  date {
    match => [ "timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss.SSS" ]
    target => "@timestamp"
  }
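  # Illustrative plain-text line that the grok pattern and date filter above
  # would parse (timestamp, level, thread, logger, message):
  #   2025-12-07T10:30:45.123Z INFO [http-nio-8080-exec-1] com.example.AuthService - User authentication successful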

  # Mutate and clean up fields
  mutate {
    add_field => {
      "service_name" => "%{[fields][service]}"
      "environment" => "%{[fields][environment]}"
    }
    remove_field => [ "host", "agent", "ecs", "input" ]
    convert => {
      "response_time" => "float"
      "status_code" => "integer"
    }
  }

  # Add fingerprint for deduplication
  fingerprint {
    source => ["message", "@timestamp", "service_name"]
    target => "fingerprint"
    method => "SHA256"
    # Hash all source fields as one string instead of overwriting per field
    concatenate_sources => true
  }

  # Conditional routing based on log level
  if [level] == "ERROR" or [level] == "FATAL" {
    mutate {
      add_tag => ["error", "alert"]
    }
  }
}

output {
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
    index => "logs-%{+YYYY.MM.dd}"
    template_name => "logs"
    template_pattern => "logs-*"
    template => "/usr/share/logstash/templates/logs-template.json"
  }

  # Send critical errors to separate index
  if "alert" in [tags] {
    elasticsearch {
      hosts => ["http://elasticsearch:9200"]
      index => "alerts-%{+YYYY.MM.dd}"
    }
  }

  # Debug output to console
  if [@metadata][debug] {
    stdout {
      codec => rubydebug
    }
  }
}
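
Because the tcp input above uses the json_lines codec, the pipeline can be smoke-tested by writing one JSON object per line to port 5000. A minimal sketch (host and port mirror the input block; the event fields are illustrative):

import json
import socket

event = {
    "@timestamp": "2025-12-07T10:30:45.123Z",
    "level": "ERROR",  # exercises the error/alert tagging branch
    "message": "Database connection refused",
    "service": "auth-service",
    "client_ip": "192.168.1.100",  # exercises the geoip filter
}

# Assumes Logstash is reachable on localhost
with socket.create_connection(("localhost", 5000)) as sock:
    # json_lines expects newline-delimited JSON, one event per line
    sock.sendall((json.dumps(event) + "\n").encode("utf-8"))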

📝 Kibana Visualization Dashboard

🔴 complex ⭐⭐⭐

Comprehensive Kibana dashboard with multiple visualizations for log analysis and monitoring metrics

⏱️ 20 min 🏷️ kibana, dashboard, visualization, charts, metrics, monitoring
{
  "dashboard": {
    "title": "System Log Analysis Dashboard",
    "panelsJSON": "[{"gridData":{"x":0,"y":0,"w":24,"h":15,"i":"1"},"panelIndex":"1","embeddableConfig":{},"panelRefName":"panel_1"},{"gridData":{"x":24,"y":0,"w":24,"h":15,"i":"2"},"panelIndex":"2","embeddableConfig":{},"panelRefName":"panel_2"},{"gridData":{"x":0,"y":15,"w":48,"h":15,"i":"3"},"panelIndex":"3","embeddableConfig":{},"panelRefName":"panel_3"}]",
    "timeRestore": false,
    "timeTo": "now",
    "timeFrom": "now-24h",
    "refreshInterval": {
      "pause": false,
      "value": 30000
    },
    "kibanaSavedObjectMeta": {
      "searchSourceJSON": "{"query":{"match_all":{}},"filter":[]}"
    },
    "description": "Comprehensive dashboard for monitoring system logs, errors, and performance metrics",
    "version": "8.0.0"
  },
  "timeField": "@timestamp",
  "panels": [
    {
      "id": "1",
      "type": "metric",
      "title": "Total Log Events",
      "visState": "{ "type": "metric", "aggs": [{ "id": "1", "type": "count", "schema": "metric", "params": {} }] }",
      "description": "Total number of log events in the selected time range"
    },
    {
      "id": "2",
      "type": "histogram",
      "title": "Log Levels Distribution",
      "visState": "{ "type": "histogram", "aggs": [{ "id": "1", "type": "count", "schema": "metric", "params": {} }, { "id": "2", "type": "terms", "schema": "segment", "params": { "field": "level", "size": 10 } }] }",
      "description": "Distribution of log levels across all services"
    },
    {
      "id": "3",
      "type": "line",
      "title": "Logs Timeline",
      "visState": "{ "type": "line", "aggs": [{ "id": "1", "type": "count", "schema": "metric", "params": {} }, { "id": "2", "type": "date_histogram", "schema": "segment", "params": { "field": "@timestamp", "interval": "1h" } }] }",
      "description": "Timeline of log events over time"
    }
  ]
}
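
One way to load a dashboard like this into Kibana is the saved-objects import API, which expects NDJSON (one saved object per line) rather than the pretty-printed JSON above. A hedged sketch with the requests library (the Kibana URL and filename are assumptions; the kbn-xsrf header is required by Kibana's HTTP APIs):

import requests

KIBANA_URL = "http://localhost:5601"  # assumed local Kibana

# The dashboard above, exported/converted to NDJSON beforehand
with open("dashboard.ndjson", "rb") as f:
    resp = requests.post(
        f"{KIBANA_URL}/api/saved_objects/_import",
        headers={"kbn-xsrf": "true"},
        params={"overwrite": "true"},
        files={"file": ("dashboard.ndjson", f, "application/ndjson")},
    )
resp.raise_for_status()
print(resp.json())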

📝 Advanced Elasticsearch Query Examples

🔴 complex ⭐⭐⭐⭐

Complex Elasticsearch queries including aggregations, filters, and full-text search for log analysis

⏱️ 30 min 🏷️ elasticsearch, query, aggregation, filter, search, dsl
{
  "query": {
    "bool": {
      "must": [
        {
          "range": {
            "@timestamp": {
              "gte": "now-24h",
              "lte": "now"
            }
          }
        },
        {
          "bool": {
            "should": [
              {
                "term": {
                  "level": "ERROR"
                }
              },
              {
                "term": {
                  "level": "FATAL"
                }
              }
            ]
          }
        }
      ],
      "filter": [
        {
          "terms": {
            "service": ["auth-service", "api-gateway", "user-service"]
          }
        }
      ]
    }
  },
  "aggs": {
    "services": {
      "terms": {
        "field": "service",
        "size": 10
      },
      "aggs": {
        "error_types": {
          "terms": {
            "field": "error.type",
            "size": 5
          }
        },
        "hourly_distribution": {
          "date_histogram": {
            "field": "@timestamp",
            "calendar_interval": "1h"
          }
        },
        "avg_response_time": {
          "avg": {
            "field": "response_time"
          }
        }
      }
    },
    "geoip_distribution": {
      "terms": {
        "field": "geoip.country_name",
        "size": 20
      }
    },
    "user_analysis": {
      "cardinality": {
        "field": "user_id"
      }
    },
    "response_time_stats": {
      "stats": {
        "field": "response_time"
      }
    },
    "error_timeline": {
      "date_histogram": {
        "field": "@timestamp",
        "calendar_interval": "10m"
      },
      "aggs": {
        "top_error_messages": {
          "top_hits": {
            "size": 3,
            "sort": [
              {
                "@timestamp": {
                  "order": "desc"
                }
              }
            ],
            "_source": ["message", "error.message", "service"]
          }
        }
      }
    }
  },
  "sort": [
    {
      "@timestamp": {
        "order": "desc"
      }
    }
  ],
  "size": 100,
  "highlight": {
    "fields": {
      "message": {},
      "error.message": {}
    }
  }
}
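
A minimal sketch of running this request with elasticsearch-py 8.x against the daily indices written by the Logstash output (the cluster URL and the file holding the query are assumptions):

import json
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

# The request body above, saved to disk beforehand (filename is illustrative)
with open("error-query.json") as f:
    request = json.load(f)

# The 8.x client accepts the body's top-level keys (query, aggs, sort, size,
# highlight) as keyword arguments
resp = es.search(index="logs-*", **request)

print("total hits:", resp["hits"]["total"]["value"])
for bucket in resp["aggregations"]["services"]["buckets"]:
    print(bucket["key"], bucket["doc_count"])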