Istio Ingress Gateway - ElasticSearch

Dashboard for the Istio ingress gateway
Last updated: 9 months ago

Downloads: 130

Reviews: 1


This dashboard visualizes Istio ingress gateway access logs shipped with Filebeat and Logstash and stored in Elasticsearch. Requirements:

  • Elasticsearch 5.x.x or 6.x.x
  • Filebeat
  • Logstash

Troubleshooting:

If Grafana shows this error on the Elasticsearch data source: Error: Missing geohash value

-> Are you running Elasticsearch 6.x.x? I hit this error with Elasticsearch 6.3.0 and fixed it by creating a new index from the template below before pushing logs to Elasticsearch:

{
    "template": "logstash-*",
    "version": 60001,
    "settings": {
        "index.refresh_interval": "5s"
    },
    "mappings": {
        "_default_": {
            "dynamic_templates": [
                {
                    "message_field": {
                        "path_match": "message",
                        "match_mapping_type": "string",
                        "mapping": {
                            "type": "text",
                            "norms": false
                        }
                    }
                },
                {
                    "string_fields": {
                        "match": "*",
                        "match_mapping_type": "string",
                        "mapping": {
                            "type": "text",
                            "norms": false,
                            "fields": {
                                "keyword": {
                                    "type": "keyword",
                                    "ignore_above": 256
                                }
                            }
                        }
                    }
                }
            ],
            "properties": {
                "@timestamp": {
                    "type": "date"
                },
                "@version": {
                    "type": "keyword"
                },
                "geoip": {
                    "dynamic": true,
                    "properties": {
                        "ip": {
                            "type": "ip"
                        },
                        "location": {
                            "type": "geo_point"
                        },
                        "latitude": {
                            "type": "half_float"
                        },
                        "longitude": {
                            "type": "half_float"
                        }
                    }
                }
            }
        }
    }
}
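
The geo_point mapping on geoip.location in this template is what gives Grafana the geohash values it is asking for. As an alternative to creating each day's index up front (as the CronJob below does), the same body can be registered once as an index template, so Elasticsearch applies it to every new logstash-* index automatically; that also matches the manage_template => false setting in the Logstash output further down. A minimal sketch using Python requests, assuming the JSON above is saved as template.json and the in-cluster service name elasticsearch-client:9200 used in the script below; note that Elasticsearch 6.x renamed the template field to index_patterns:

#!/usr/bin/env python3
import json
import requests

# Load the mapping template shown above (assumed saved as template.json).
with open('template.json') as f:
    body = json.load(f)

# Elasticsearch 6.x replaced the "template" key with "index_patterns".
if 'template' in body:
    body['index_patterns'] = [body.pop('template')]

# Register it once; every new logstash-* index then picks up the mapping.
resp = requests.put('http://elasticsearch-client:9200/_template/logstash', json=body)
resp.raise_for_status()
print(resp.json())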

I also used a Kubernetes CronJob to create the next day's index automatically. Sample Python code is below.

#!/usr/bin/env python3
import datetime
import json
import requests

# Create tomorrow's logstash-YYYY.MM.DD index ahead of time so the mapping
# (text/keyword fields, geoip.location as geo_point) is in place before
# Logstash writes the first document.
today = datetime.date.today()
tomorrow = today + datetime.timedelta(days=1)

raw = '{"template":"logstash-*","version":60001,"settings":{"index.refresh_interval":"5s"},"mappings":{"_default_":{"dynamic_templates":[{"message_field":{"path_match":"message","match_mapping_type":"string","mapping":{"type":"text","norms":false}}},{"string_fields":{"match":"*","match_mapping_type":"string","mapping":{"type":"text","norms":false,"fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}],"properties":{"@timestamp":{"type":"date"},"@version":{"type":"keyword"},"geoip":{"dynamic":true,"properties":{"ip":{"type":"ip"},"location":{"type":"geo_point"},"latitude":{"type":"half_float"},"longitude":{"type":"half_float"}}}}}}}'
data = json.loads(raw)

# "template" and "version" belong to the index-template API; the
# create-index API accepts only settings/mappings/aliases, so drop them.
data.pop('template', None)
data.pop('version', None)

index_name = 'logstash-{}'.format(tomorrow.strftime('%Y.%m.%d'))
create_index = requests.put('http://elasticsearch-client:9200/' + index_name, json=data)
create_index.raise_for_status()
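
To check that the job worked, listing the matching indices is enough (same assumed service name):

import requests

# Tomorrow's logstash-YYYY.MM.DD index should show up in this listing.
print(requests.get('http://elasticsearch-client:9200/_cat/indices/logstash-*?v').text)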

cronjob.yml

apiVersion: batch/v1beta1
kind: CronJob
metadata:
  name: elk-automation
spec:
  schedule: "0 2 * * *"
  jobTemplate:
    spec:
      template:
        spec:
          containers:
          - name: elk-automation
            image: <your image>
          restartPolicy: Never
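
Here <your image> is assumed to bundle the Python script above together with the requests library (for example a python:3-based image that runs the script as its entrypoint). With the "0 2 * * *" schedule the job runs daily at 02:00, so the next day's index exists well before the date rolls over.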

Sample code on GitHub: https://github.com/khainguyen95/elasticsearch-template

Collector Configuration Details

Install filebeat:

https://github.com/helm/charts/tree/master/stable/filebeat

filebeat.inputs:
  - type: docker
    containers.ids:
      - "*"
    processors:
      # Enrich every event with pod, namespace, and container metadata.
      - add_kubernetes_metadata:
          in_cluster: true
      # Keep only the ingress gateway's Envoy access logs.
      - drop_event:
          when:
            not:
              equals:
                kubernetes.container.name: "istio-proxy"

output.logstash:
  hosts: ["logstash:5044"]

Install logstash:

https://github.com/helm/charts/tree/master/stable/logstash

inputs:
  main: |-
    input {
      beats {
        port => 5044
      }
    }
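
Port 5044 here has to match the hosts entry in Filebeat's output.logstash section above.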

filters:
  main: |-
    filter {
      # Parse the default Istio/Envoy access log line into named fields.
      grok {
        match => { "message" => '\[%{TIMESTAMP_ISO8601:[@metadata][timestamp]}\] "(?:%{WORD:method} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?)" %{NUMBER:response_code} %{GREEDYDATA:response_flage} %{NUMBER:bytes_recieved} %{NUMBER:bytes_sent} %{NUMBER:duration} %{NUMBER:resp} "%{IP:client_ip},%{GREEDYDATA:other_ip}" "%{GREEDYDATA:user_agent}" "%{GREEDYDATA:request_id}" "%{DATA:domain}" "%{IP:upstream_host}:%{NUMBER:upstream_port}" %{GREEDYDATA:outbound} - %{GREEDYDATA:other}' }
      }
      # The captured timestamp is ISO8601 (e.g. 2019-03-06T09:31:27.354Z),
      # so let the date filter parse it as such.
      date {
        match => ["[@metadata][timestamp]", "ISO8601"]
        target => "@timestamp"
      }
      mutate {
        convert => { "duration" => "integer" }
        convert => { "response_code" => "integer" }
        convert => { "bytes_recieved" => "integer" }
        convert => { "bytes_sent" => "integer" }
        convert => { "resp" => "integer" }
      }
      # Resolve the client IP to coordinates; this populates geoip.location.
      geoip {
        source => "client_ip"
      }
    }
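
The geoip filter fills in geoip.location from client_ip; combined with the geo_point mapping created earlier, that is the field Grafana's geohash queries rely on.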

outputs:
  main: |-
    output {
      elasticsearch {
        hosts => ["${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}"]
        manage_template => false
        index => "logstash-%{+YYYY.MM.dd}"
      }
    }
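
For reference, here is a made-up log line in the default Istio/Envoy access log format that the grok filter above expects (all values are illustrative):

[2019-03-06T09:31:27.354Z] "GET /productpage HTTP/1.1" 200 - 0 5183 12 11 "10.32.0.1,10.32.0.5" "Mozilla/5.0" "cc21-example-request-id" "example.com" "10.40.1.7:9080" outbound|9080||productpage.default.svc.cluster.local - 10.40.1.7:80 10.32.0.1:0

This parses into response_code=200, bytes_recieved=0, bytes_sent=5183, duration=12, client_ip=10.32.0.1, and so on, with @timestamp taken from the bracketed ISO8601 timestamp.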