🎯 Recommended examples
Balanced sample collections from various categories for you to explore
Elastic Stack (ELK) Examples
Complete Elastic Stack setup including Elasticsearch, Logstash, Kibana, and Beats for log management and analysis
💻 Elasticsearch Cluster Configuration yaml
🟡 intermediate
⭐⭐⭐⭐
Production-ready Elasticsearch cluster configuration with security, performance tuning, and monitoring
⏱️ 45 min
🏷️ elasticsearch, cluster, configuration, security
Prerequisites:
Elasticsearch basics, Linux administration, Docker knowledge
# Elasticsearch Cluster Configuration
# elasticsearch.yml
cluster.name: "production-cluster"
# Node configuration
node.name: "es-node-01"
node.roles: [master, data, ingest]
node.attr.zone: "zone-a"
# Network configuration
network.host: 0.0.0.0
http.port: 9200
transport.port: 9300
# Discovery settings
discovery.seed_hosts: ["es-node-01", "es-node-02", "es-node-03"]
cluster.initial_master_nodes: ["es-node-01", "es-node-02", "es-node-03"]
# Security configuration
xpack.security.enabled: true
xpack.security.transport.ssl.enabled: true
xpack.security.transport.ssl.verification_mode: certificate
xpack.security.transport.ssl.keystore.path: certs/elastic-certificates.p12
xpack.security.transport.ssl.truststore.path: certs/elastic-certificates.p12
xpack.security.http.ssl.enabled: true
xpack.security.http.ssl.keystore.path: certs/elastic-certificates.p12
xpack.security.http.ssl.truststore.path: certs/elastic-certificates.p12
# Authentication and authorization
xpack.security.authc.anonymous.roles: kibana_system
xpack.security.authc.realms.native.enabled: true
xpack.security.authc.realms.ldap.enabled: false
# Memory settings
bootstrap.memory_lock: true
# Performance tuning
indices.queries.cache.size: 20%
indices.memory.index_buffer_size: 10%
indices.fielddata.cache.size: 40%
# Index lifecycle management
# (ILM is always enabled in 8.x; the xpack.ilm.enabled flag was removed)
# Monitoring
xpack.monitoring.collection.enabled: true
xpack.monitoring.collection.interval: 30s
# Shard allocation
cluster.routing.allocation.awareness.attributes: zone
cluster.routing.allocation.disk.threshold_enabled: true
cluster.routing.allocation.disk.watermark.low: 85%
cluster.routing.allocation.disk.watermark.high: 90%
cluster.routing.allocation.disk.watermark.flood_stage: 95%
# JVM Options (jvm.options file)
-Xms4g
-Xmx4g
-XX:+UseG1GC
-XX:G1HeapRegionSize=16m
-XX:+UseStringDeduplication
# Pre-touch all heap pages at JVM start
-XX:+AlwaysPreTouch
-Xss1m
# GC logging
-Xlog:gc*
-XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=/var/log/elasticsearch
# System limits (sysctl.conf)
vm.max_map_count=262144
vm.swappiness=1
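To apply these kernel and limits settings without a reboot, something like the following works on most Linux hosts (file paths assumed; adjust for your distribution):
# Apply kernel settings at runtime, then persist them
sudo sysctl -w vm.max_map_count=262144
sudo sysctl -w vm.swappiness=1
sudo sysctl -p /etc/sysctl.conf
# Allow the elasticsearch user to lock memory (needed for bootstrap.memory_lock)
# /etc/security/limits.conf
elasticsearch soft memlock unlimited
elasticsearch hard memlock unlimited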
# Docker Compose for Elasticsearch Cluster
# docker-compose.yml
version: '3.8'
services:
elasticsearch-01:
image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0
container_name: es-node-01
environment:
- node.name=es-node-01
- cluster.name=production-cluster
- discovery.seed_hosts=es-node-02,es-node-03
- cluster.initial_master_nodes=es-node-01,es-node-02,es-node-03
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms2g -Xmx2g"
- xpack.security.enabled=true
- xpack.security.transport.ssl.enabled=true
- xpack.security.transport.ssl.certificate=certs/es-node-01/es-node-01.crt
- xpack.security.transport.ssl.key=certs/es-node-01/es-node-01.key
- xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
ulimits:
memlock:
soft: -1
hard: -1
volumes:
- data-01:/usr/share/elasticsearch/data
- ./certs:/usr/share/elasticsearch/config/certs
- ./elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
ports:
- "9200:9200"
- "9300:9300"
networks:
- elastic
elasticsearch-02:
image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0
container_name: es-node-02
environment:
- node.name=es-node-02
- cluster.name=production-cluster
- discovery.seed_hosts=es-node-01,es-node-03
- cluster.initial_master_nodes=es-node-01,es-node-02,es-node-03
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms2g -Xmx2g"
- xpack.security.enabled=true
ulimits:
memlock:
soft: -1
hard: -1
volumes:
- data-02:/usr/share/elasticsearch/data
- ./certs:/usr/share/elasticsearch/config/certs
networks:
- elastic
elasticsearch-03:
image: docker.elastic.co/elasticsearch/elasticsearch:8.11.0
container_name: es-node-03
environment:
- node.name=es-node-03
- cluster.name=production-cluster
- discovery.seed_hosts=es-node-01,es-node-02
- cluster.initial_master_nodes=es-node-01,es-node-02,es-node-03
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms2g -Xmx2g"
- xpack.security.enabled=true
ulimits:
memlock:
soft: -1
hard: -1
volumes:
- data-03:/usr/share/elasticsearch/data
- ./certs:/usr/share/elasticsearch/config/certs
networks:
- elastic
volumes:
data-01:
data-02:
data-03:
networks:
elastic:
driver: bridge
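Once the containers are up, a quick health check confirms that all three nodes joined; this is a sketch assuming the CA and elastic password from the setup above:
# Start the cluster and verify its health
docker compose up -d
curl -s --cacert certs/ca/ca.crt -u elastic:your_password "https://localhost:9200/_cluster/health?pretty"
# Expect "status": "green" and "number_of_nodes": 3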
# Index Templates
# PUT _index_template/application-logs
{
"index_patterns": ["application-*"],
"template": {
"settings": {
"number_of_shards": 3,
"number_of_replicas": 1,
"index.lifecycle.name": "application-logs-policy",
"index.lifecycle.rollover_alias": "application-logs"
},
"mappings": {
"properties": {
"@timestamp": {"type": "date"},
"level": {
"type": "keyword",
"fields": {
"text": {"type": "text"}
}
},
"message": {"type": "text"},
"logger_name": {"type": "keyword"},
"thread_name": {"type": "keyword"},
"exception": {
"properties": {
"class": {"type": "keyword"},
"message": {"type": "text"},
"stacktrace": {"type": "text"}
}
},
"host": {
"properties": {
"name": {"type": "keyword"},
"ip": {"type": "ip"},
"architecture": {"type": "keyword"},
"os": {
"properties": {
"family": {"type": "keyword"},
"platform": {"type": "keyword"},
"version": {"type": "keyword"}
}
}
}
},
"service": {
"properties": {
"name": {"type": "keyword"},
"version": {"type": "keyword"},
"environment": {"type": "keyword"}
}
},
"trace": {
"properties": {
"id": {"type": "keyword"},
"span_id": {"type": "keyword"}
}
},
"transaction": {
"properties": {
"id": {"type": "keyword"},
"type": {"type": "keyword"},
"duration": {"type": "long"}
}
},
"user": {
"properties": {
"id": {"type": "keyword"},
"email": {"type": "keyword"},
"username": {"type": "keyword"}
}
},
"labels": {"type": "object"},
"tags": {"type": "keyword"}
}
}
},
"composed_of": ["ecs-template"],
"priority": 100
}
# Index Lifecycle Management Policy
# PUT _ilm/policy/application-logs-policy
{
"policy": {
"phases": {
"hot": {
"actions": {
"rollover": {
"max_size": "10GB",
"max_age": "7d",
"max_docs": 10000000
},
"set_priority": {
"priority": 100
}
}
},
"warm": {
"min_age": "7d",
"actions": {
"set_priority": {
"priority": 50
},
"allocate": {
"number_of_replicas": 0
}
}
},
"cold": {
"min_age": "30d",
"actions": {
"set_priority": {
"priority": 0
}
}
},
"delete": {
"min_age": "90d"
}
}
}
}
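For the rollover_alias above to work, the first backing index must be created by hand with the alias marked as the write index; a minimal bootstrap request (index name assumed) looks like:
# Bootstrap the first backing index for rollover
# PUT application-logs-000001
{
  "aliases": {
    "application-logs": {
      "is_write_index": true
    }
  }
}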
# Security Configuration
# Setup passwords and users
echo "y" | elasticsearch-setup-passwords interactive
# Create roles
# POST _security/role/application_admins
{
"indices": [
{
"names": ["application-*"],
"privileges": ["all"]
}
],
"run_as": ["app_user"]
}
# Create users
# POST _security/user/app_admin
{
"password": "secure_password",
"roles": ["application_admins", "kibana_admin"],
"full_name": "Application Admin",
"email": "[email protected]"
}
# API Keys for application access
# POST _security/api_key
{
"name": "application-api-key",
"expiration": "1y",
"role_descriptors": {
"application_writer": {
"indices": [
{
"names": ["application-*"],
"privileges": ["create", "index", "write"]
}
]
}
}
}
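The create-API-key response returns an id and an api_key; clients authenticate by sending base64(id:api_key) in an Authorization header. A sketch with placeholder values:
# Use the API key returned above (KEY_ID and API_KEY are placeholders)
curl -s --cacert certs/ca/ca.crt \
  -H "Authorization: ApiKey $(printf 'KEY_ID:API_KEY' | base64)" \
  "https://localhost:9200/application-logs/_search?size=1"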
💻 Logstash Pipeline Configuration ruby
🟡 intermediate
⭐⭐⭐⭐
Complete Logstash pipeline configurations for multiple log sources, with processing and filtering
⏱️ 40 min
🏷️ logstash, pipeline, grok, filtering
Prerequisites:
Logstash basics, Regular expressions, JSON knowledge
# Main Logstash Configuration
# /etc/logstash/logstash.yml
node.name: "logstash-01"
path.data: /var/lib/logstash
path.logs: /var/log/logstash
pipeline.workers: 4
pipeline.batch.size: 125
pipeline.batch.delay: 50
pipeline.unsafe_shutdown: false
# HTTP API (metrics/monitoring endpoint, not a data input; Beats data arrives on port 5044 below)
api.http.host: "0.0.0.0"
api.http.port: 8080
# Monitoring
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.hosts: ["https://elasticsearch:9200"]
xpack.monitoring.elasticsearch.username: "logstash_system"
xpack.monitoring.elasticsearch.password: "your_password"
xpack.monitoring.elasticsearch.ssl.certificate_authorities: ["/usr/share/logstash/certs/ca.crt"]
# Pipeline Configuration
# /etc/logstash/conf.d/01-beats-input.conf
input {
beats {
port => 5044
ssl => true
ssl_certificate => "/usr/share/logstash/certs/logstash.crt"
ssl_key => "/usr/share/logstash/certs/logstash.key"
}
}
# Syslog Input
# /etc/logstash/conf.d/02-syslog-input.conf
input {
syslog {
port => 5140
type => syslog
tags => ["syslog"]
}
syslog {
port => 5141
protocol => "tcp"
type => syslog
tags => ["syslog", "tcp"]
}
}
# File Input for Application Logs
# /etc/logstash/conf.d/03-file-input.conf
input {
file {
path => "/var/log/applications/*.log"
start_position => "beginning"
sincedb_path => "/var/lib/logstash/sincedb"
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601}"
negate => true
what => "previous"
}
tags => ["application"]
}
}
# Kafka Input
# /etc/logstash/conf.d/04-kafka-input.conf
input {
kafka {
bootstrap_servers => "kafka:9092"
topics => ["application-logs", "security-events"]
group_id => "logstash-consumer"
consumer_threads => 3
decorate_events => true
codec => json
}
}
# JDBC Input
# /etc/logstash/conf.d/05-jdbc-input.conf
input {
jdbc {
jdbc_driver_library => "/usr/share/logstash/mysql-connector-java.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://mysql:3306/appdb"
jdbc_user => "logstash"
jdbc_password => "password"
schedule => "* * * * *"
statement => "SELECT * FROM audit_logs WHERE updated_at > :sql_last_value"
use_column_value => true
tracking_column => "updated_at"
tracking_column_type => "timestamp"
last_run_metadata_path => "/usr/share/logstash/jdbc_last_run"
tags => ["database", "audit"]
}
}
# Filter Configuration
# /etc/logstash/conf.d/10-grok-filter.conf
filter {
if "syslog" in [tags] {
grok {
match => {
"message" => "%{SYSLOGBASE}"
}
}
date {
match => [ "timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}
if "application" in [tags] {
grok {
match => {
"message" => "%{TIMESTAMP_ISO8601:timestamp} [%{LOGLEVEL:level}] %{GREEDYDATA:message}"
}
overwrite => ["message"]
}
}
}
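For reference, a line the application pattern above is meant to match, and the fields it extracts (example values, not from a real system):
# Example input:
#   2024-01-15T10:23:45.123Z [ERROR] Connection refused to db-host:5432
# Extracted fields:
#   timestamp => "2024-01-15T10:23:45.123Z"
#   level     => "ERROR"
#   message   => "Connection refused to db-host:5432" (overwritten by the grok match)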
# JSON Filter for structured logs
# /etc/logstash/conf.d/11-json-filter.conf
filter {
if [message] {
json {
source => "message"
target => "parsed"
}
if [parsed][@timestamp] {
date {
match => [ "[parsed][@timestamp]", "ISO8601" ]
target => "@timestamp"
}
}
    # Promote parsed fields to the event root
    # (mutate's merge cannot write to the root; use a small ruby filter)
    if [parsed] {
      ruby {
        code => "event.get('parsed').each { |k, v| event.set(k, v) }"
      }
    }
}
}
# GeoIP Filter
# /etc/logstash/conf.d/12-geoip-filter.conf
filter {
if [client_ip] {
geoip {
source => "client_ip"
target => "geoip"
fields => ["city_name", "country_name", "location", "organization"]
}
}
if [source_ip] {
geoip {
source => "source_ip"
target => "geoip_source"
}
}
}
# User Agent Filter
# /etc/logstash/conf.d/13-useragent-filter.conf
filter {
if [user_agent] {
useragent {
source => "user_agent"
target => "ua"
}
}
}
# Date Filter
# /etc/logstash/conf.d/14-date-filter.conf
filter {
# Handle various date formats
if [timestamp] {
date {
match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z", "yyyy-MM-dd HH:mm:ss", "ISO8601" ]
}
}
# Add ingest timestamp
mutate {
add_field => { "[@metadata][ingest_timestamp]" => "%{+yyyy-MM-dd'T'HH:mm:ss.SSSZ}" }
}
}
# Mutate Filter for field manipulation
# /etc/logstash/conf.d/15-mutate-filter.conf
filter {
# Clean up fields
mutate {
remove_field => ["host", "version"]
}
# Add environment tag
mutate {
add_field => { "environment" => "production" }
}
# Convert fields to appropriate types
if [response_time] {
mutate {
convert => { "response_time" => "float" }
}
}
if [status_code] {
mutate {
convert => { "status_code" => "integer" }
}
}
}
# Ruby Filter for complex processing
# /etc/logstash/conf.d/16-ruby-filter.conf
filter {
ruby {
code => "
# Calculate response time bucket
if event.get('response_time')
rt = event.get('response_time').to_f
if rt < 0.1
event.set('response_time_bucket', 'fast')
elsif rt < 0.5
event.set('response_time_bucket', 'medium')
else
event.set('response_time_bucket', 'slow')
end
end
      # Add day of week (@timestamp is a LogStash::Timestamp; .time yields a Ruby Time)
      require 'date'
      event.set('day_of_week', Date::ABBR_DAYNAMES[event.get('@timestamp').time.wday])
      # Add processing timestamp
      event.set('[@metadata][processed_at]', Time.now.iso8601)
"
}
}
# Drop Filter for unwanted logs
# /etc/logstash/conf.d/17-drop-filter.conf
filter {
# Drop health check logs
  if [message] =~ /(?i)health.*check|ping/ {
drop {}
}
# Drop debug logs in production
if [environment] == "production" and [level] == "DEBUG" {
drop {}
}
}
# Output Configuration
# /etc/logstash/conf.d/20-elasticsearch-output.conf
output {
elasticsearch {
hosts => ["https://elasticsearch:9200"]
user => "logstash_writer"
password => "your_password"
ssl_certificate_verification => true
cacert => "/usr/share/logstash/certs/ca.crt"
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
# Template management
template_name => "logstash"
template => "/usr/share/logstash/templates/logstash.json"
template_overwrite => true
# Error handling
retry_on_conflict => 3
retry_max_interval => 5
retry_max_items => 500
}
}
# Conditional routing
# /etc/logstash/conf.d/21-conditional-output.conf
output {
# Security events to dedicated index
if "security" in [tags] or "threat" in [tags] {
elasticsearch {
hosts => ["https://elasticsearch:9200"]
user => "logstash_writer"
password => "your_password"
index => "security-events-%{+YYYY.MM.dd}"
ssl_certificate_verification => true
}
}
# High error rate logs
else if [level] == "ERROR" {
elasticsearch {
hosts => ["https://elasticsearch:9200"]
index => "error-logs-%{+YYYY.MM.dd}"
ssl_certificate_verification => true
}
}
# Application logs
else {
elasticsearch {
hosts => ["https://elasticsearch:9200"]
index => "application-logs-%{+YYYY.MM.dd}"
ssl_certificate_verification => true
}
}
}
# Debug output
# /etc/logstash/conf.d/99-debug-output.conf
output {
if [@metadata][debug] {
stdout {
codec => rubydebug
}
}
# File output for backup
if [environment] == "development" {
file {
path => "/tmp/logstash-debug-%{+YYYY-MM-dd}.log"
codec => json_lines
}
}
}
# Pipeline configuration for specific use cases
# /etc/logstash/pipelines.yml
- pipeline.id: main
path.config: "/etc/logstash/conf.d/*.conf"
pipeline.workers: 4
pipeline.batch.size: 125
queue.type: persisted
queue.max_bytes: 2gb
queue.checkpoint.writes: 1024
- pipeline.id: security
path.config: "/etc/logstash/conf.d/security/*.conf"
pipeline.workers: 2
pipeline.batch.size: 50
- pipeline.id: metrics
path.config: "/etc/logstash/conf.d/metrics/*.conf"
pipeline.workers: 1
pipeline.batch.size: 250
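Pipeline definitions can be validated offline before a reload; a typical check against the paths above:
# Validate the pipeline configuration without starting Logstash
/usr/share/logstash/bin/logstash --config.test_and_exit -f /etc/logstash/conf.d/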
# Docker Compose for Logstash
# docker-compose.yml
version: '3.8'
services:
logstash:
image: docker.elastic.co/logstash/logstash:8.11.0
container_name: logstash
environment:
- "LS_JAVA_OPTS=-Xmx1g -Xms1g"
volumes:
- ./logstash.yml:/usr/share/logstash/config/logstash.yml
- ./conf.d:/usr/share/logstash/pipeline
- ./certs:/usr/share/logstash/certs
- logstash-data:/usr/share/logstash/data
ports:
- "5044:5044"
- "5140:5140/udp"
- "5141:5141/tcp"
- "8080:8080"
networks:
- elastic
depends_on:
- elasticsearch
volumes:
logstash-data:
networks:
elastic:
external: true
# Performance Monitoring for Logstash
# /etc/logstash/conf.d/monitoring.conf
input {
pipeline {
address => "monitoring"
}
}
filter {
  # Record elapsed time since the event timestamp, in milliseconds
  ruby {
    code => "
      event.set('processing_duration_ms', (Time.now.to_f - event.get('@timestamp').to_f) * 1000)
"
}
}
output {
elasticsearch {
hosts => ["https://elasticsearch:9200"]
index => "logstash-monitoring-%{+YYYY.MM}"
ssl_certificate_verification => true
}
}
💻 Kibana Dashboards Configuration json
🟡 intermediate
⭐⭐⭐
Complete Kibana setup with dashboards, visualizations, and index patterns for comprehensive log analysis
⏱️ 35 min
🏷️ kibana, dashboard, visualization, analytics
Prerequisites:
Kibana basics, JSON knowledge, Elasticsearch knowledge
{
"index_patterns": [
{
"id": "application-logs",
"attributes": {
"title": "application-*",
"timeFieldName": "@timestamp",
"fields": [
{
"name": "@timestamp",
"type": "date",
"searchable": true,
"aggregatable": true
},
{
"name": "level",
"type": "string",
"searchable": true,
"aggregatable": true
},
{
"name": "message",
"type": "string",
"searchable": true,
"aggregatable": false
},
{
"name": "host.name",
"type": "string",
"searchable": true,
"aggregatable": true
},
{
"name": "service.name",
"type": "string",
"searchable": true,
"aggregatable": true
},
{
"name": "response_time",
"type": "number",
"searchable": true,
"aggregatable": true
},
{
"name": "status_code",
"type": "number",
"searchable": true,
"aggregatable": true
},
{
"name": "user.id",
"type": "string",
"searchable": true,
"aggregatable": true
}
]
}
}
],
"visualizations": [
{
"id": "error-rate-timeline",
"type": "line",
"attributes": {
"title": "Error Rate Timeline",
"visState": {
"title": "Error Rate Timeline",
"type": "line",
"params": {
"grid": {
"categoryLines": false,
"style": {
"color": "#eee"
}
},
"categoryAxes": [
{
"id": "CategoryAxis-1",
"type": "category",
"position": "bottom",
"show": true,
"style": {},
"scale": {
"type": "linear"
},
"labels": {
"show": true,
"truncate": 100,
"rotation": 0
},
"title": {}
}
],
"valueAxes": [
{
"name": "LeftAxis-1",
"id": "ValueAxis-1",
"type": "value",
"position": "left",
"show": true,
"style": {},
"scale": {
"type": "linear",
"mode": "normal"
},
"labels": {
"show": true,
"rotate": 0,
"filter": false,
"truncate": 100
},
"title": {
"text": "Count"
}
}
],
"seriesParams": [
{
"show": "true",
"type": "line",
"mode": "normal",
"data": {
"label": "Error Count",
"id": "1"
},
"valueAxis": "ValueAxis-1",
"drawLinesBetweenPoints": true,
"showCircles": true
}
],
"addTooltip": true,
"addLegend": true,
"legendPosition": "right",
"times": [
{
"id": "time",
"type": "histogram",
"field": "@timestamp",
"interval": "auto",
"min": "now-24h",
"max": "now"
}
],
"addTimeMarker": false
},
"aggs": [
{
"id": "1",
"enabled": true,
"type": "count",
"schema": "metric",
"params": {}
},
{
"id": "2",
"enabled": true,
"type": "date_histogram",
"schema": "segment",
"params": {
"field": "@timestamp",
"interval": "auto",
"customInterval": "2h",
"min_doc_count": 1,
"extended_bounds": {}
}
}
]
},
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"index":"application-*","query":{"match_all":{}},"filter":[]}"
}
}
},
{
"id": "top-errors",
"type": "table",
"attributes": {
"title": "Top Error Messages",
"visState": {
"title": "Top Error Messages",
"type": "table",
"params": {
"perPage": 10,
"showPartialRows": false,
"showMeticsAtAllLevels": false,
"sort": {
"columnIndex": null,
"direction": null
},
"showTotal": false,
"totalFunc": "sum"
},
"aggs": [
{
"id": "1",
"enabled": true,
"type": "count",
"schema": "metric",
"params": {}
},
{
"id": "2",
"enabled": true,
"type": "terms",
"schema": "bucket",
"params": {
"field": "message",
"size": 10,
"order": "desc",
"orderBy": "1"
}
}
]
},
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"index":"application-*","query":{"match":{"level":"ERROR"}},"filter":[]}"
}
}
},
{
"id": "response-time-histogram",
"type": "histogram",
"attributes": {
"title": "Response Time Distribution",
"visState": {
"title": "Response Time Distribution",
"type": "histogram",
"params": {
"grid": {
"categoryLines": false,
"style": {
"color": "#eee"
}
},
"categoryAxes": [
{
"id": "CategoryAxis-1",
"type": "category",
"position": "bottom",
"show": true,
"style": {},
"scale": {
"type": "linear"
},
"labels": {
"show": true,
"truncate": 100,
"rotation": 0
},
"title": {}
}
],
"valueAxes": [
{
"name": "LeftAxis-1",
"id": "ValueAxis-1",
"type": "value",
"position": "left",
"show": true,
"style": {},
"scale": {
"type": "linear",
"mode": "normal"
},
"labels": {
"show": true,
"rotate": 0,
"filter": false,
"truncate": 100
},
"title": {
"text": "Count"
}
}
],
"seriesParams": [
{
"show": "true",
"type": "histogram",
"mode": "stacked",
"data": {
"label": "Response Time",
"id": "1"
},
"valueAxis": "ValueAxis-1",
"drawLinesBetweenPoints": true,
"showCircles": true
}
],
"addTooltip": true,
"addLegend": true,
"legendPosition": "right",
"times": []
},
"aggs": [
{
"id": "1",
"enabled": true,
"type": "count",
"schema": "metric",
"params": {}
},
{
"id": "2",
"enabled": true,
"type": "histogram",
"schema": "bucket",
"params": {
"field": "response_time",
"interval": "10",
"min_doc_count": 1
}
}
]
},
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"index":"application-*","query":{"range":{"response_time":{"gte":0}}},"filter":[]}"
}
}
}
],
"dashboards": [
{
"id": "application-monitoring",
"attributes": {
"title": "Application Monitoring Dashboard",
"hits": 0,
"description": "Comprehensive application monitoring dashboard with metrics, errors, and performance data",
"panelsJSON": "[{"gridData":{"x":0,"y":0,"w":24,"h":15,"i":"1"},"version":"8.11.0","panelIndex":"1","embeddableConfig":{},"panelRefName":"panel_1"},{"gridData":{"x":24,"y":0,"w":24,"h":15,"i":"2"},"version":"8.11.0","panelIndex":"2","embeddableConfig":{},"panelRefName":"panel_2"},{"gridData":{"x":0,"y":15,"w":48,"h":15,"i":"3"},"version":"8.11.0","panelIndex":"3","embeddableConfig":{},"panelRefName":"panel_3"}]",
"timeRestore": false,
"timeTo": "now",
"timeFrom": "now-24h",
"refreshInterval": {
"pause": false,
"value": 30000
},
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"query":{"match_all":{}},"filter":[]}"
}
}
},
{
"id": "security-operations",
"attributes": {
"title": "Security Operations Dashboard",
"hits": 0,
"description": "Security monitoring dashboard for threat detection and incident response",
"panelsJSON": "[{"gridData":{"x":0,"y":0,"w":48,"h":15,"i":"1"},"version":"8.11.0","panelIndex":"1","embeddableConfig":{},"panelRefName":"panel_1"},{"gridData":{"x":0,"y":15,"w":24,"h":15,"i":"2"},"version":"8.11.0","panelIndex":"2","embeddableConfig":{},"panelRefName":"panel_2"},{"gridData":{"x":24,"y":15,"w":24,"h":15,"i":"3"},"version":"8.11.0","panelIndex":"3","embeddableConfig":{},"panelRefName":"panel_3"}]",
"timeRestore": false,
"timeTo": "now",
"timeFrom": "now-24h",
"refreshInterval": {
"pause": false,
"value": 60000
}
}
}
],
"saved_objects": [
{
"type": "visualization",
"id": "http-status-codes",
"attributes": {
"title": "HTTP Status Codes",
"visState": "{"title":"HTTP Status Codes","type":"pie","params":{"addTooltip":true,"addLegend":true,"legendPosition":"right","isDonut":true},"aggs":[{"id":"1","enabled":true,"type":"count","schema":"metric","params":{}},{"id":"2","enabled":true,"type":"terms","schema":"bucket","params":{"field":"status_code","size":10,"order":"desc","orderBy":"1"}}]}",
"uiStateJSON": "{}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"index":"nginx-access","query":{"match_all":{}},"filter":[]}"
}
}
},
{
"type": "visualization",
"id": "geographic-distribution",
"attributes": {
"title": "Geographic Distribution",
"visState": "{"title":"Geographic Distribution","type":"map","params":{"addTooltip":true,"mapType":"Scaled Circle Markers","wms":{"enabled":false},"isDesaturated":false,"heatBlurRadius":3,"heatMaxZoom":16,"heatMinOpacity":0.1,"heatNormalizeData":true,"heatRadius":25,"legendPosition":"bottomright","mapZoom":2,"centerLat":39.0,"centerLon":-98.0,"colorSchema":"Reds","colorsRange":[{"from":0,"to":10000}]}",
"uiStateJSON": "{"mapZoom":2,"centerLat":39.0,"centerLon":-98.0}",
"description": "",
"kibanaSavedObjectMeta": {
"searchSourceJSON": "{"index":"application-*","query":{"exists":{"field":"geoip.location"}},"filter":[]}"
}
}
}
]
}
# Kibana Configuration File
# /etc/kibana/kibana.yml
server.host: "0.0.0.0"
server.port: 5601
# Elasticsearch connection
elasticsearch.hosts: ["https://elasticsearch:9200"]
elasticsearch.username: "kibana_system"
elasticsearch.password: "your_password"
elasticsearch.ssl.certificateAuthorities: ["/usr/share/kibana/certs/ca.crt"]
elasticsearch.ssl.verificationMode: "certificate"
# Security
xpack.security.enabled: true
xpack.encryptedSavedObjects.encryptionKey: "your_encryption_key_here"
xpack.reporting.encryptionKey: "your_reporting_key_here"
# Logging
logging.appenders.file:
type: file
fileName: /var/log/kibana/kibana.log
layout:
type: json
rootLogger:
level: info
appenders:
default:
type: console
layout:
type: json
# Monitoring
xpack.monitoring.ui.container.elasticsearch.enabled: true
# Reporting
xpack.reporting.enabled: true
xpack.reporting.capture.browser.chromium.disableSandbox: true
# Search
search.maxBucketsLimit: 20000
# Server settings
server.maxPayloadBytes: 10485760
# Internationalization
i18n.locale: "en"
# Map settings
map.includeElasticMapsService: true
# Tile service
map.tilemap.url: "https://tiles.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana_app"
map.tilemap.options.attribution: "© [Elastic Maps Service](https://www.elastic.co/elastic-maps-service)"
map.tilemap.options.maxZoom: 18
map.tilemap.options.minZoom: 1
map.tilemap.options.subdomains: ["tiles0", "tiles1", "tiles2", "tiles3"]
# Traffic monitoring
xpack.monitoring.kibana.collection.enabled: true
xpack.monitoring.kibana.collection.interval: 60000
# CORS settings
server.cors.enabled: true
server.cors.origin: ["http://localhost:3000"]
# Custom themes
xpack.uiSettings.enabled: true
# Saved objects
savedObjects.maxImportPayloadBytes: 10485760
# Backup and restore
path.data: /usr/share/kibana/data
# Plugin configuration
xpack.security.encryptionKey: "something_at_least_32_characters"
# Screenshotting
xpack.screenshotting.browser.chromium.disableSandbox: true
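Rather than inventing the encryption keys above by hand, Kibana ships a helper that generates all three; paste its output into kibana.yml:
# Generate encryption keys for saved objects, reporting, and security
/usr/share/kibana/bin/kibana-encryption-keys generate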
# Docker Compose for Kibana
# docker-compose.yml
version: '3.8'
services:
kibana:
image: docker.elastic.co/kibana/kibana:8.11.0
container_name: kibana
environment:
- "ELASTICSEARCH_HOSTS=https://elasticsearch:9200"
- "ELASTICSEARCH_USERNAME=kibana_system"
- "ELASTICSEARCH_PASSWORD=your_password"
- "SERVER_HOST=0.0.0.0"
- "SERVER_PORT=5601"
- "xpack.security.enabled=true"
volumes:
- ./kibana.yml:/usr/share/kibana/config/kibana.yml
- ./certs:/usr/share/kibana/certs
- kibana-data:/usr/share/kibana/data
ports:
- "5601:5601"
networks:
- elastic
depends_on:
- elasticsearch
volumes:
kibana-data:
networks:
elastic:
external: true
# API Examples for Kibana Management
# Create Index Pattern
# POST /api/saved_objects/_import
curl -X POST "localhost:5601/api/saved_objects/_import" -H "kbn-xsrf: true" --form [email protected]
# Create Dashboard
# POST /api/saved_objects/_import
curl -X POST "localhost:5601/api/saved_objects/_import" -H "kbn-xsrf: true" --form [email protected]
# Export Objects
# POST /api/saved_objects/_export
curl -X POST "localhost:5601/api/saved_objects/_export" -H "kbn-xsrf: true" -H "Content-Type: application/json" -d '{"type": ["dashboard", "visualization"]}' -o exported_objects.ndjson
# Find Objects
# GET /api/saved_objects/_find
curl -X GET "localhost:5601/api/saved_objects/_find?type=dashboard" -H "kbn-xsrf: true"
# Create Short URL
# POST /api/short_url
curl -X POST "localhost:5601/api/short_url" -H "kbn-xsrf: true" -H "Content-Type: application/json" -d '{
"locatorId": "DASHBOARD_APP_LOCATOR",
"params": { "dashboardId": "application-monitoring" },
"slug": "my-dashboard"
}'
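Before running any of the calls above, a quick sanity check that the Kibana API is reachable (add -u credentials if anonymous access is disabled):
# Check Kibana status
curl -s "localhost:5601/api/status"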
💻 Filebeat and Metricbeat Configuration yaml
🟡 intermediate
⭐⭐⭐
Complete Filebeat and Metricbeat agent configuration for log collection and system monitoring
⏱️ 40 min
🏷️ filebeat, metricbeat, beats, log collection
Prerequisites:
Elastic Stack basics, YAML knowledge, System administration
# Filebeat Configuration
# /etc/filebeat/filebeat.yml
#============================== Filebeat inputs ===============================
filebeat.inputs:
# Application logs
- type: log
enabled: true
paths:
- /var/log/applications/*.log
- /var/log/applications/*/*.log
fields:
logtype: application
environment: production
fields_under_root: true
  multiline.pattern: '^\d{4}-\d{2}-\d{2}'   # plain regex; Filebeat does not support grok patterns
multiline.negate: true
multiline.match: after
scan_frequency: 10s
harvester_buffer_size: 16384
max_bytes: 10485760
# Nginx access logs
- type: log
enabled: true
paths:
- /var/log/nginx/access.log*
fields:
logtype: nginx_access
service: nginx
fields_under_root: true
processors:
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
# Nginx error logs
- type: log
enabled: true
paths:
- /var/log/nginx/error.log*
fields:
logtype: nginx_error
service: nginx
fields_under_root: true
# System logs
- type: syslog
enabled: true
  protocol.udp:
    host: "localhost:514"
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
# MySQL logs
- type: log
enabled: true
paths:
- /var/log/mysql/mysql.log*
- /var/log/mysql/error.log*
fields:
logtype: mysql
service: mysql
fields_under_root: true
# Audit logs
- type: log
enabled: true
paths:
- /var/log/audit/audit.log*
fields:
logtype: audit
service: audit
fields_under_root: true
#================================ Processors ====================================
processors:
# Decode JSON logs
- decode_json_fields:
fields: ["message"]
target: ""
overwrite_keys: true
# Add host metadata
- add_host_metadata:
when.not.contains.tags: forwarded
# Add Docker metadata
- add_docker_metadata: ~
# Add Kubernetes metadata
- add_kubernetes_metadata: ~
# Add cloud metadata
- add_cloud_metadata: ~
# Rename fields
- rename:
fields:
- from: "agent.ephemeral_id"
to: "agent.id"
- from: "log.file.path"
to: "log.path"
ignore_missing: true
# Drop unnecessary fields
- drop_fields:
fields: ["agent.ephemeral_id", "host.architecture"]
ignore_missing: true
# Add timestamp for processing
- timestamp:
field: ingest_timestamp
layouts:
- '2006-01-02T15:04:05Z07:00'
test:
- '2019-06-22T16:33:51.000Z'
# Parse custom fields
- dissect:
tokenizer: "%{key}=%{value}"
field: "message"
target_prefix: "custom"
ignore_missing: true
  # GeoIP and user agent enrichment: Filebeat has no geoip or user_agent
  # processors; handle these in an Elasticsearch ingest pipeline
  # (referenced via the `pipeline` output setting below)
#================================ Outputs =====================================
output.elasticsearch:
# Array of hosts to connect to.
hosts: ["https://elasticsearch:9200"]
# Protocol - either `http` or `https`
protocol: "https"
# Authentication credentials
username: "filebeat_writer"
password: "your_password"
# SSL configuration
ssl.enabled: true
ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]
ssl.verification_mode: "certificate"
# Index settings
index: "filebeat-%{[agent.version]}-%{+yyyy.MM.dd}"
  # Template settings: configured via the top-level setup.template.* keys
  # (see the global options section below); template.* is not valid here
# Pipeline settings
pipeline: filebeat-pipeline
# Worker settings
worker: 4
bulk_max_size: 50
  # Error handling
  max_retries: 3
  backoff.init: 1s
  backoff.max: 60s
#================================ Logging =====================================
# Sets log level. The default log level is info.
logging.level: info
logging.to_files: true
logging.files:
path: /var/log/filebeat
name: filebeat
keepfiles: 7
permissions: 0644
#================================ Monitor =====================================
monitoring.enabled: true
#================================ HTTP Endpoint =====================================
http.enabled: true
http.port: 5066
#================================ Cloud ==================================
cloud.id: "your-cloud-id"
cloud.auth: "elastic:your-password"
#================================ Tags ==================================
tags: ["production", "logs"]
#============================== Filebeat global options ===============================
# Name of the shipper
name: "filebeat-node-01"
# Path for the data files (also holds the registry)
path.data: /var/lib/filebeat
# Index template setup (pairs with the custom index name in the output section)
setup.template.name: "filebeat"
setup.template.pattern: "filebeat-*"
setup.template.settings:
  index.number_of_shards: 1
  index.number_of_replicas: 1
#================================ Monitoring =====================================
monitoring.elasticsearch:
hosts: ["https://elasticsearch:9200"]
username: "filebeat_writer"
password: "your_password"
ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]
#================================ Kibana =====================================
setup.kibana:
host: "https://kibana:5601"
username: "elastic"
password: "your_password"
ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]
#================================ Modules =====================================
filebeat.modules:
- module: nginx
access:
enabled: true
var.paths: ["/var/log/nginx/access.log*"]
error:
enabled: true
var.paths: ["/var/log/nginx/error.log*"]
- module: system
syslog:
enabled: true
var.paths: ["/var/log/syslog*"]
auth:
enabled: true
var.paths: ["/var/log/auth.log*"]
- module: mysql
error:
enabled: true
var.paths: ["/var/log/mysql/error.log*"]
slowlog:
enabled: true
var.paths: ["/var/log/mysql/mysql-slow.log*"]
- module: redis
slowlog:
enabled: true
var.paths: ["/var/log/redis/redis-slow.log*"]
#================================ Metricbeat Configuration ===============================
# /etc/metricbeat/metricbeat.yml
#============================== Metricbeat global options ===============================
metricbeat.config.modules:
path: ${path.config}/modules.d/*.yml
#============================== Autodiscover ===============================
metricbeat.autodiscover:
  providers:
    - type: docker
      hints.enabled: true
      hints.default_config:
        module: docker
        metricsets:
          - container
          - cpu
          - diskio
          - memory
          - network
        period: 10s
        hosts: ["unix:///var/run/docker.sock"]
    - type: kubernetes
      hints.enabled: true
      add_resource_metadata:
        namespace.enabled: true
        node.enabled: true
#============================== Modules ===============================
metricbeat.modules:
#------------------------------ System Module ------------------------------
- module: system
metricsets:
- cpu
- load
- memory
- network
- process
- process_summary
- core
- diskio
- socket
enabled: true
period: 10s
processes: ['.*']
#------------------------------ Docker Module ------------------------------
- module: docker
metricsets:
- "container"
- "cpu"
- "diskio"
- "event"
- "healthcheck"
- "info"
- "memory"
- "network"
enabled: true
hosts: ["unix:///var/run/docker.sock"]
period: 10s
#------------------------------ Nginx Module ------------------------------
- module: nginx
metricsets:
- "stubstatus"
enabled: true
period: 10s
hosts: ["http://localhost"]
#------------------------------ MySQL Module ------------------------------
- module: mysql
metricsets:
- "status"
- "galera_status"
- "performance"
enabled: true
period: 10s
hosts: ["tcp(127.0.0.1:3306)/"]
username: metricbeat
password: password
#------------------------------ Redis Module ------------------------------
- module: redis
metricsets:
- "info"
- "keyspace"
enabled: true
period: 10s
hosts: ["redis://localhost:6379"]
#------------------------------ Kubernetes Module ------------------------------
- module: kubernetes
metricsets:
- node
- state_node
- state_deployment
- state_replicaset
- state_statefulset
- state_pod
- state_container
- volume
- system
- pod
- container
- proxy
enabled: true
hosts: ["https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT}"]
bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token
ssl.verification_mode: none
period: 10s
#------------------------------ Jolokia Module ------------------------------
- module: jolokia
metricsets: ["jmx"]
enabled: true
period: 10s
hosts: ["localhost:8778"]
jmx.mappings:
- mbean: "java.lang:type=Memory"
attributes:
HeapMemoryUsage:
alias: "heap_memory_usage"
NonHeapMemoryUsage:
alias: "non_heap_memory_usage"
- mbean: "java.lang:type=Threading"
attributes:
ThreadCount:
alias: "thread_count"
PeakThreadCount:
alias: "peak_thread_count"
#================================ Processors =====================================
processors:
- add_host_metadata:
when.not.contains.tags: forwarded
- add_docker_metadata: ~
- add_kubernetes_metadata: ~
#================================ Outputs =====================================
output.elasticsearch:
# Array of hosts to connect to.
hosts: ["https://elasticsearch:9200"]
# Protocol
protocol: "https"
# Authentication credentials
username: "metricbeat_writer"
password: "your_password"
# SSL configuration
ssl.enabled: true
ssl.certificate_authorities: ["/etc/metricbeat/certs/ca.crt"]
ssl.verification_mode: "certificate"
# Index settings
index: "metricbeat-%{[agent.version]}-%{+yyyy.MM.dd}"
  # Template settings: configured via the top-level setup.template.* keys
  # (see the Template section below); template.* is not valid here
# Pipeline settings
pipeline: metricbeat-pipeline
#================================ Logging =====================================
logging.level: info
logging.to_files: true
logging.files:
path: /var/log/metricbeat
name: metricbeat
keepfiles: 7
permissions: 0644
#================================ Monitoring =====================================
monitoring.enabled: true
#================================ HTTP Endpoint =====================================
http.enabled: true
http.port: 5067
#================================ Tags ==================================
tags: ["production", "metrics"]
#================================ Template =====================================
setup.template.name: "metricbeat"
setup.template.pattern: "metricbeat-*"
setup.template.settings:
  index.number_of_shards: 1
  index.number_of_replicas: 1
#================================ Cloud ==================================
cloud.id: "your-cloud-id"
cloud.auth: "elastic:your-password"
#================================ Kibana =====================================
setup.kibana:
host: "https://kibana:5601"
username: "elastic"
password: "your_password"
ssl.certificate_authorities: ["/etc/metricbeat/certs/ca.crt"]
#================================ Monitoring =====================================
monitoring.elasticsearch:
hosts: ["https://elasticsearch:9200"]
username: "metricbeat_writer"
password: "your_password"
ssl.certificate_authorities: ["/etc/metricbeat/certs/ca.crt"]
#================================ Docker Compose Configuration ===============================
# docker-compose.yml
version: '3.8'
services:
filebeat:
image: docker.elastic.co/beats/filebeat:8.11.0
container_name: filebeat
user: root
environment:
- "ELASTICSEARCH_HOSTS=https://elasticsearch:9200"
- "ELASTICSEARCH_USERNAME=filebeat_writer"
- "ELASTICSEARCH_PASSWORD=your_password"
- "KIBANA_HOST=https://kibana:5601"
volumes:
- ./filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
- /var/log:/var/log:ro
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /var/run/docker.sock:/var/run/docker.sock:ro
- ./certs:/usr/share/filebeat/certs:ro
- filebeat-data:/usr/share/filebeat/data
ports:
- "5066:5066"
networks:
- elastic
depends_on:
- elasticsearch
metricbeat:
image: docker.elastic.co/beats/metricbeat:8.11.0
container_name: metricbeat
user: root
environment:
- "ELASTICSEARCH_HOSTS=https://elasticsearch:9200"
- "ELASTICSEARCH_USERNAME=metricbeat_writer"
- "ELASTICSEARCH_PASSWORD=your_password"
- "KIBANA_HOST=https://kibana:5601"
volumes:
- ./metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro
- /proc:/hostfs/proc:ro
- /sys/fs/cgroup:/hostfs/sys/fs/cgroup:ro
- /:/hostfs:ro
- ./certs:/usr/share/metricbeat/certs:ro
- metricbeat-data:/usr/share/metricbeat/data
ports:
- "5067:5067"
networks:
- elastic
depends_on:
- elasticsearch
volumes:
filebeat-data:
metricbeat-data:
networks:
elastic:
external: true
# Module Configuration Files
# /etc/filebeat/modules.d/nginx.yml
- module: nginx
access:
enabled: true
var.paths:
- "/var/log/nginx/access.log*"
var.custom_fields:
service: nginx
environment: production
error:
enabled: true
var.paths:
- "/var/log/nginx/error.log*"
# /etc/metricbeat/modules.d/docker.yml
- module: docker
metricsets:
- container
- cpu
- diskio
- event
- healthcheck
- info
- memory
- network
hosts: ["unix:///var/run/docker.sock"]
period: 10s
processors:
- add_docker_metadata: ~
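The same self-checks exist on the Metricbeat side; a minimal verification pass:
# Validate the config and test the Elasticsearch output
metricbeat modules enable docker
metricbeat test config
metricbeat test output
metricbeat setup --dashboards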
# SSL Certificate Generation
#!/bin/bash
# Create certificate directories
mkdir -p certs/{ca,server,filebeat,metricbeat}
# Generate CA
openssl genrsa -out certs/ca/ca.key 2048
openssl req -new -x509 -key certs/ca/ca.key -out certs/ca/ca.crt -days 365 -subj "/C=US/ST=CA/O=Company/CN=CA"
# Generate server certificate
openssl genrsa -out certs/server/server.key 2048
openssl req -new -key certs/server/server.key -out certs/server/server.csr -subj "/C=US/ST=CA/O=Company/CN=server"
# Sign server certificate
openssl x509 -req -in certs/server/server.csr -CA certs/ca/ca.crt -CAkey certs/ca/ca.key -CAcreateserial -out certs/server/server.crt -days 365
# Generate client certificate for Filebeat
openssl genrsa -out certs/filebeat/filebeat.key 2048
openssl req -new -key certs/filebeat/filebeat.key -out certs/filebeat/filebeat.csr -subj "/C=US/ST=CA/O=Company/CN=filebeat"
openssl x509 -req -in certs/filebeat/filebeat.csr -CA certs/ca/ca.crt -CAkey certs/ca/ca.key -CAcreateserial -out certs/filebeat/filebeat.crt -days 365
# Copy CA certificate to all clients
cp certs/ca/ca.crt certs/filebeat/
cp certs/ca/ca.crt certs/metricbeat/
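As an alternative to raw openssl, Elasticsearch bundles elasticsearch-certutil, which can produce the CA and the PKCS#12 keystore referenced in elasticsearch.yml; a sketch (empty passphrases for brevity):
# Alternative: generate certificates with elasticsearch-certutil
bin/elasticsearch-certutil ca --out elastic-stack-ca.p12 --pass ""
bin/elasticsearch-certutil cert --ca elastic-stack-ca.p12 --out elastic-certificates.p12 --pass ""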