Add Prometheus Blackbox exporter for synthetic monitoring

Now the only thing I'm missing is an alerting system that actually works
and we'll be off to the races.
This commit is contained in:
Salt 2024-07-09 18:03:39 -05:00
parent 2efb5b2554
commit 45904e221d
4 changed files with 109 additions and 34 deletions

View File

@ -3,3 +3,6 @@
# Handlers for the Prometheus monitoring stack. Triggered via `notify` by
# tasks that change container configuration or data directories.
- name: restart prometheus container
  community.docker.docker_container:
    name: prometheus
    state: started
    restart: true
  become: true
- name: restart blackbox container
  community.docker.docker_container:
    name: prometheus-blackbox
    state: started
    restart: true
  become: true

View File

@ -1,33 +1,65 @@
# vim:ft=ansible:
# Deploys Prometheus plus the Blackbox exporter as Docker containers on the
# "web" network. All config/data paths are owned by UID/GID 5476, the user
# both containers run as.
- name: deploy prometheus
  block:
    - name: ensure prometheus dirs
      ansible.builtin.file:
        state: directory
        owner: 5476
        group: 5476
        mode: "0750"
        path: "{{ item }}"
      loop:
        - /data/prometheus/config
        - /data/prometheus/data
      notify: restart prometheus container
    - name: template out configuration file
      ansible.builtin.template:
        src: prometheus.yml.j2
        owner: 5476
        group: 5476
        mode: "0640"
        dest: /data/prometheus/config/prometheus.yml
      notify: restart prometheus container
    - name: docker deploy prometheus
      community.docker.docker_container:
        name: prometheus
        # NOTE(review): ":latest" makes deploys non-reproducible — consider
        # pinning a version tag.
        image: prom/prometheus:latest
        # Quoted: a colon-containing scalar should never be left plain.
        user: "5476:5476"
        env:
          TZ: "America/Chicago"
        networks:
          - name: web
            aliases: ["prometheus"]
        volumes:
          - /data/prometheus/config:/etc/prometheus
          - /data/prometheus/data:/prometheus
- name: deploy prometheus blackbox
  block:
    - name: ensure blackbox dirs
      ansible.builtin.file:
        state: directory
        owner: 5476
        group: 5476
        mode: "0750"
        path: /data/prometheus/blackbox
      notify: restart blackbox container
    - name: template out configuration file
      ansible.builtin.template:
        src: blackbox.yml.j2
        owner: 5476
        group: 5476
        mode: "0640"
        dest: /data/prometheus/blackbox/blackbox.yml
      notify: restart blackbox container
    - name: docker deploy prometheus blackbox
      community.docker.docker_container:
        name: prometheus-blackbox
        image: quay.io/prometheus/blackbox-exporter:latest
        user: "5476:5476"
        command:
          - '--config.file=/config/blackbox.yml'
        networks:
          - name: web
            aliases: ["blackbox"]
        volumes:
          # Mounted at /config to match the --config.file flag above.
          - /data/prometheus/blackbox:/config

View File

@ -0,0 +1,12 @@
# https://github.com/prometheus/blackbox_exporter/blob/master/CONFIGURATION.md
# vim:ft=ansible:
# Single probe module: an IPv4 HTTP GET that follows redirects and expects a
# 2xx response over HTTP/1.1 or HTTP/2.0, timing out after 5s.
modules:
  http_2xx:
    prober: http
    timeout: 5s
    http:
      preferred_ip_protocol: "ip4"
      follow_redirects: true
      valid_http_versions: ["HTTP/1.1", "HTTP/2.0"]
      valid_status_codes: []  # Defaults to 2xx
      method: GET

View File

@ -1,4 +1,3 @@
# my global config
# vim:ft=ansible:
---
global:
@ -9,7 +8,36 @ scrape_configs:
# Default Prometheus job to monitor itself
- job_name: "prometheus"
static_configs:
# NOTE(review): the next two lines are the before/after of a diff (a
# quote-style change only); the rendered file should contain exactly one.
- targets: ["localhost:9090"]
- targets: ['localhost:9090']
# This is shipped by the Ansible role that deploys Prometheus
# Blackbox probing job: Prometheus scrapes the exporter's /probe endpoint,
# passing each target as a query parameter with module=http_2xx.
- job_name: "blackbox"
metrics_path: /probe
params:
module: [http_2xx]
static_configs:
- targets:
# Build the probe list from inventory: every port of every service tagged
# "nagios-checkhttps".
# NOTE(review): loop variable 'host' is never referenced below, and
# 'vars.services' does not vary per iteration as written — this looks like
# it emits the same service list once per host in groups['tags_nagios'];
# confirm against the inventory whether duplicate targets are produced.
{% for host in groups['tags_nagios'] %}
{% for service in vars.services %}
{% for tag in service.tags %}
{% if tag.slug == "nagios-checkhttps" %}
{% for port in service.ports %}
- "https://{{ service.name }}:{{ port }}"
{% endfor %}
{% endif %}
{% endfor %}
{% endfor %}
{% endfor %}
# Standard blackbox relabeling: copy the configured target into the
# ?target= query param, keep it as the 'instance' label, then point the
# actual scrape address at the exporter container itself.
relabel_configs:
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: blackbox:9115
# Scrape the exporter's own metrics as well.
- job_name: "blackbox-exporter"
static_configs:
- targets: ['blackbox:9115']
# These two jobs are included for every node in our inventory
- job_name: "node-exporter"