Downloads: 158

Reviews: 0

  • Screen Shot 2020-10-11 at 03.44.38.png
    Screen Shot 2020-10-11 at 03.44.38.png
  • Screen Shot 2020-10-11 at 03.33.00.png
    Screen Shot 2020-10-11 at 03.33.00.png
  • Screen Shot 2020-10-11 at 03.32.40.png
    Screen Shot 2020-10-11 at 03.32.40.png
  • Screen Shot 2020-10-11 at 03.49.59.png
    Screen Shot 2020-10-11 at 03.49.59.png
  • Screen Shot 2020-10-11 at 03.51.56.png
    Screen Shot 2020-10-11 at 03.51.56.png
  • Screen Shot 2020-10-11 at 04.11.55.png
    Screen Shot 2020-10-11 at 04.11.55.png

ISP-Checker

ISP-Checker implements a set of Telegraf checks that send metrics to InfluxDB (an open-source, time-series database) and runs several kinds of metric collectors, providing averages, aggregations, and integrals of the measured values at a glance, with a focus on service quality.

https://github.com/fmdlc/ISP-Checker/raw/master/img/demo.gif

ISP-Checker measures things like ICMP packet loss, average DNS query resolution time, HTTP response times, ICMP latency, ICMP standard deviation, and upload/download speed (using Speedtest-CLI), and provides a graphical MTR/Traceroute view.

It is easily extensible, and it was built on top of Docker to make it portable and easy to run anywhere, automatically importing all the components needed to perform the checks.

You need to have MTR and Speedtest-CLI installed as dependencies.

Check https://github.com/fmdlc/ISP-Checker for more information.

https://github.com/fmdlc/ISP-Checker/blob/master/img/img_1.png?raw=true

Collector Configuration Details

# Tags applied to every metric Telegraf emits; none are defined here.
[global_tags]

# Telegraf agent settings (see the Telegraf configuration reference).
[agent]
  # Default data-collection interval for all inputs.
  interval = "10s"
  # Round collection times to the interval (e.g. :00, :10, :20 for 10s).
  round_interval = true
  # Maximum number of metrics sent to outputs in one write batch.
  metric_batch_size = 1000
  # Unsent metrics kept in memory while an output is unreachable.
  metric_buffer_limit = 10000
  # Random sleep before each collection to spread load ("0s" = disabled).
  collection_jitter = "0s"
  # How often buffered metrics are flushed to the outputs.
  flush_interval = "10s"
  # Random jitter added to each flush ("0s" = disabled).
  flush_jitter = "0s"
  # Timestamp precision; empty string keeps Telegraf's default.
  precision = ""
  # Override for the reported host tag; empty string uses the OS hostname.
  hostname = ""
  omit_hostname = false
  debug = false

# Write all collected metrics to the bundled InfluxDB container.
[[outputs.influxdb]]
  urls = ["http://influxdb:8086"]
  # NOTE: TOML itself has no variable interpolation; the $INFLUXDB_*
  # placeholders below are expanded from environment variables by
  # Telegraf when it loads the config.
  database = "$INFLUXDB_DB"
  timeout = "60s"
  username = "$INFLUXDB_ADMIN_USER"
  password = "$INFLUXDB_ADMIN_PASSWORD"
  user_agent = "telegraf"
  # false = Telegraf creates the database on startup if it does not exist.
  skip_database_creation = false

# Host system metrics (load averages, uptime, logged-in users).
[[inputs.system]]

# Measure DNS resolution time for well-known domains against public resolvers.
[[inputs.dns_query]]
  servers = ["4.2.2.1", "8.8.8.8"]
  domains = ["www.google.com", "www.twitter.com", "www.amazon.com", "www.wikipedia.org"]
  record_type = "A"
  port = 53
  # Query timeout, in seconds.
  timeout = 3

 [[inputs.http_response]]
    urls = [
      "http://www.google.com",
      "http://www.twitter.com",
      "http://www.amazon.com",
      "http://www.yahoo.com"
    ]

    response_timeout = "5s"
    method = "GET"
    follow_redirects = true

# Telegraf's own internal metrics (buffer usage, gather/write stats).
[[inputs.internal]]
  # Also collect Go runtime memory statistics.
  collect_memstats = true

# Per-interface network I/O counters.
[[inputs.net]]

# TCP/UDP connection-state counters.
[[inputs.netstat]]

# ICMP latency, packet loss, and standard deviation per target host.
[[inputs.ping]]
  urls = [
    "google.com",
    "twitter.com",
    "amazon.com",
    "yahoo.com"
  ]
  # Shell out to the system ping binary instead of the native Go ping.
  method = "exec"
  # Number of pings sent per gather cycle.
  count = 1
  ping_interval = 1.0  # seconds between successive pings
  timeout = 5.0        # per-ping timeout, seconds
  deadline = 10        # overall deadline for the ping run, seconds
  binary = "ping"
  ipv6 = false

[[inputs.exec]]
  commands=["mtr -C google.com s3-website.ap-northeast-2.amazonaws.com s3-website.eu-central-1.amazonaws.com s3-website.af-south-1.amazonaws.com"]
  timeout = "3m"
  interval = "10m"
  data_format = "csv"
  csv_skip_rows = 1
  csv_column_names=[ "", "", "status","dest","hop","ip","loss","","", "","avg","best","worst","stdev"]
  name_override = "mtr"
  csv_tag_columns = ["dest", "hop", "ip"]

# Upload/download bandwidth via Speedtest-CLI, reported in bytes (--bytes).
[[inputs.exec]]
  commands = ["/usr/bin/speedtest-cli --csv --bytes"]
  name_override = "speedtest"
  timeout = "3m"
  # Speed tests are bandwidth-heavy; run only every 10 minutes.
  interval = "10m"
  data_format = "csv"
  csv_column_names = ["Server ID", "Sponsor", "Server Name", "Timestamp", "Distance", "Ping", "Download", "Upload", "Share", "IP Address"]
  csv_timestamp_column = "Timestamp"
  # Go reference-time layout for RFC 3339 timestamps.
  csv_timestamp_format = "2006-01-02T15:04:05Z07:00"