Compare commits
No commits in common. "master" and "cleanup" have entirely different histories.
@ -81,6 +81,11 @@ Common:
|
|||||||
stage: play-main
|
stage: play-main
|
||||||
script:
|
script:
|
||||||
- ansible-playbook --skip-tags no-auto playbooks/site_common.yml --ssh-common-args='-o ProxyCommand="ssh -W %h:%p -q ansible@bastion1.dallas.mgmt.desu.ltd"' --vault-password-file ~/.vault_pass
|
- ansible-playbook --skip-tags no-auto playbooks/site_common.yml --ssh-common-args='-o ProxyCommand="ssh -W %h:%p -q ansible@bastion1.dallas.mgmt.desu.ltd"' --vault-password-file ~/.vault_pass
|
||||||
|
Nagios:
|
||||||
|
stage: play-main
|
||||||
|
retry: 1
|
||||||
|
script:
|
||||||
|
- ansible-playbook -l vm-general-1.ashburn.mgmt.desu.ltd playbooks/prod_web.yml --tags nagios --ssh-common-args='-o ProxyCommand="ssh -W %h:%p -q ansible@bastion1.dallas.mgmt.desu.ltd"' --vault-password-file ~/.vault_pass
|
||||||
|
|
||||||
# CLEANUP
|
# CLEANUP
|
||||||
Cleanup:
|
Cleanup:
|
||||||
|
@ -30,26 +30,26 @@ adminuser_ssh_authorized_keys:
|
|||||||
# For backups
|
# For backups
|
||||||
backup_s3_bucket: !vault |
|
backup_s3_bucket: !vault |
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
66316231643933316261303631656432376339663264666661663634616465326537303331626634
|
61393939633736616361336162633564356434363963303737366236373332653265366132393439
|
||||||
6235616564316638386434366534663639656236393861630a303530333835353432326131653735
|
3333643463306561616261636466303631373866353962310a356561633833633533353937323265
|
||||||
30313734383265376238306333323330366338646636336137653661373365633365393732386466
|
64656235616637366363323330346134656366663733393462346333613535633838333938653434
|
||||||
3263373233653261330a663435643835643430326464623834303864646363373265336134643136
|
6133326433613239650a386333626339363263323134313830353963326265666336306130656534
|
||||||
6162
|
6534
|
||||||
backup_s3_aws_access_key_id: !vault |
|
backup_s3_aws_access_key_id: !vault |
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
62343334333230643465623639633334363331353266366533366464643162333238333363633763
|
61353734383466366564333832643738313238666235336332303539383639626263633231396261
|
||||||
3431663162666566393738396165396639353230633537610a393863663234626134373962393132
|
6165393062393266343661643466633163383164383032340a333833656566336331323565386162
|
||||||
33356236626337313435383362336233366637646336663465366638343461663533373362316161
|
35646665353539616538353339616531346564636466643639326366353165313861373761396537
|
||||||
3639313537393734350a636365366137353763333032366338323334333936633330333439376161
|
3731653463643838330a383065313135343763636534656133343666363237356462326236643631
|
||||||
62613232363231346562643064383066393761353566366438363766353536386461
|
34366564373661396434663633346635663331393538363362376265653334623538
|
||||||
backup_s3_aws_secret_access_key: !vault |
|
backup_s3_aws_secret_access_key: !vault |
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
32616664316437316638636263653237386665396632313639363962376361393763373535356130
|
64316231613337333231383837333930336561633164393762343838646136393165626361346637
|
||||||
6136353736616263326166633261356233383530613462370a353039303261306231366465326662
|
3364643830346533623137643530323438366665393632320a633032336664616261353734343661
|
||||||
39326233306565306639366165393930656461383334383931323263363031623333313462316433
|
36646565383532616133353530343331663731663965656662363830363063303361373861663762
|
||||||
3635616437373236650a353661343131303332376161316664333833393833373830623130666633
|
3032613362626233350a613464333230363830383334363032303730646134306331383733363036
|
||||||
66356130646434653039363863346630363931383832353637636131626530616434
|
34346334306633306664323337643433356336366633396239306539613539633535386238346662
|
||||||
backup_s3_aws_endpoint_url: "https://s3.us-east-005.backblazeb2.com"
|
6232313138393062626631386135383234376361643362353966
|
||||||
|
|
||||||
|
|
||||||
# For zerotier
|
# For zerotier
|
||||||
@ -86,6 +86,78 @@ secret_ara_secret_key: !vault |
|
|||||||
31346465336361316433383865613233373836643366346538633330616232386132636662643963
|
31346465336361316433383865613233373836643366346538633330616232386132636662643963
|
||||||
303938396531623561653335646231616239
|
303938396531623561653335646231616239
|
||||||
|
|
||||||
|
# For Firefly III
|
||||||
|
secret_firefly_app_key: !vault |
|
||||||
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
|
36326365626537313464373434303833373261303835643035666431326335633634376364376233
|
||||||
|
3664323235383337313266316466363734643331313862630a636164616462623965353331373266
|
||||||
|
65653363353039653231316464653366303938656363333239313165313662636163323366303433
|
||||||
|
6432633664666339660a383938333531333536666361633762633831363132366563396237346330
|
||||||
|
32323266346363656336396264626231653331343862636632646466353236393438363037623466
|
||||||
|
6535373866616238323339326338316330383064336138646663
|
||||||
|
secret_firefly_db_pass: !vault |
|
||||||
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
|
31386133326239313637393462633930626634653562303361326634323633363037303862313361
|
||||||
|
3133356362333833653636623761386163646435633239370a613632646461303534353134346431
|
||||||
|
36613930393235653862376639626238326561633064333565383564626330636639633136643365
|
||||||
|
3565316233663262360a353631323762313130326361643532626334363263636539313233646362
|
||||||
|
37633961633162353936386366623136633436306235336235363566616563366563
|
||||||
|
secret_firefly_access_token: !vault |
|
||||||
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
|
65663238653330636264353332336237306565373135666462623937363564393366636231333662
|
||||||
|
6130333864613462323864333832343261393730383332340a383032353036626630366564396231
|
||||||
|
31396233383763653739363939343938663866666664623463616462376337383433646436303932
|
||||||
|
6265396236383437380a633432633233663562303062316164343463636565356437353633663964
|
||||||
|
32356462393036346433306366613333613236656535643662666237663335646461613434613738
|
||||||
|
33626634333235323561633134653362636461306439663834623136376134353733653039653635
|
||||||
|
61323863663566336265323831633532396337653432376362366533313064303635366539623033
|
||||||
|
38353063366135646566376338333536376335653766666336306664616664616633616562663339
|
||||||
|
32373138666262326666616234303938353631333663303834376161396232633635393133313235
|
||||||
|
65626337356536383430346538616366336134383731643536656235376563303063306263306562
|
||||||
|
62343631613837346138393936366135646636643463333637656137313931346661643261633437
|
||||||
|
35343261643339343861636235323331346432656435323564396535376539303764663031393164
|
||||||
|
63353932653866366634656631633133633333303564626466333265363138376638636534316166
|
||||||
|
36353839383264613634336237343463366662313432376161643532356566626162313362383339
|
||||||
|
64663739343365346264316363653939663663656231373262653439653765613764346336306465
|
||||||
|
65336561396363323637396432633362376537626361383765326363363635306537613533356436
|
||||||
|
62303439656661343337353933643963623730653732393236616533626564386339383965623334
|
||||||
|
38366332666131303230636431626237623561623333313236636438613564333631633237663961
|
||||||
|
61386435633832656639363962653138363863363861616162326430623133373330336236336232
|
||||||
|
34636134333230393064303234343962633166323462363939323732336263346662643066633436
|
||||||
|
37666234393733306364346161653138616564646534393266326632666435303531333866633332
|
||||||
|
38323638393066623937646237393738343433393637346338356164346439333632343033366233
|
||||||
|
66356163326164313735353738386637336365623331383337306538326663373666373639393238
|
||||||
|
33363537376633373336376633666136386530633961373430313666313463616637663161303436
|
||||||
|
32363265313739646164666534323165373562303766326338623534396434323162623533386337
|
||||||
|
33653262663935306365393438613137373162353063306431666439383161613937653062313366
|
||||||
|
35376630376530643464363364626561373137646165353464363937613235353635353833386661
|
||||||
|
38613862303236316632646532373635303865643531663665386536613233373863346331633138
|
||||||
|
33303561303637366138663834633634653861623462666634396237393663613465653032306237
|
||||||
|
36303566356163666363653535616632366533633365306265333962303134306132656131316464
|
||||||
|
39343864386139616230643238356335653736623064336163393832386332656130306465353566
|
||||||
|
37393364323263623838663464346439373038303766643033356137633238343439303234326330
|
||||||
|
65373037613435366232306530653734623036353333383935353937376136326237316361303538
|
||||||
|
62343033333339613935393061323039396332646537656263386230373664336661653237663365
|
||||||
|
66613961366531316631653334373563353032396462303265636464326261353531643132633764
|
||||||
|
63663133636264386364393435323736303831313162646336646166396361643834313865303536
|
||||||
|
65343734386630326432633930343462643065383535393033663132383933626337613732623536
|
||||||
|
64323964396133326432336538616130303631306330343361366339343736373062313861663431
|
||||||
|
63303031326561303566303164376531376535646665386263653630303832636661393561373233
|
||||||
|
37663039633934666332336132343262626132613764343138376165633637656237353565646536
|
||||||
|
34663965626333353034666134363966366531356635323739363331383761396638356265666537
|
||||||
|
38326235613035383235396166323663343139663439613834306462666364643530633038373763
|
||||||
|
31393431393464393530656435326531656665343362646634303734646436633364366339626139
|
||||||
|
35326636343031626631653230633636393561663736623931316637323435626336383430613365
|
||||||
|
32663237313161376261656261313737636465316664643531313639356533616265646264393636
|
||||||
|
32646465663035336537363236643461666663653838626531333130383261653637313762623735
|
||||||
|
35616362343331313035396232656361313032633630656530613833313064376335393365636439
|
||||||
|
39646334663436643466633561646364373265366230656662633364646463373435623963306464
|
||||||
|
61346164623739303335306138636531333938363566326336393462666132383838613837326664
|
||||||
|
34613334306336656564636636393934303963626533616365363634353232326235653735663666
|
||||||
|
33623938373530373166386162353635333135613837626437383435656439643064303961326664
|
||||||
|
65613139313836663038393164363264383738376564363730616635326233376533313161303564
|
||||||
|
66636639663531333166616635396630616237666232343464653139646364653339
|
||||||
|
|
||||||
# For GVM
|
# For GVM
|
||||||
secret_gvm_db_pass: !vault |
|
secret_gvm_db_pass: !vault |
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
@ -150,16 +222,6 @@ secret_gitlab_db_pass: !vault |
|
|||||||
3365636636316534660a373562346462363935393565366636353061343932663763663532383565
|
3365636636316534660a373562346462363935393565366636353061343932663763663532383565
|
||||||
36666438366337303362373838626234363266646132363235323436653131363735
|
36666438366337303362373838626234363266646132363235323436653131363735
|
||||||
|
|
||||||
# For Grafana
|
|
||||||
secret_grafana_matrix_token: !vault |
|
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
|
||||||
62313634386364663564353664656437623863366137343938666635663638313464663838343135
|
|
||||||
6361366536363232396434333136653632376539343432390a623033636534313865306465373563
|
|
||||||
31343565343937376336393263616134373333336237623166333966633639646535613234316638
|
|
||||||
6634313534336635610a373363313737643165346264333736316362316438376662643665333661
|
|
||||||
30326666616362366133396562323433323435613232666337336430623230383765346333343232
|
|
||||||
3765346238303835633337636233376263366130303436336439
|
|
||||||
|
|
||||||
# For Nagios
|
# For Nagios
|
||||||
secret_nagios_admin_pass: !vault |
|
secret_nagios_admin_pass: !vault |
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
@ -176,48 +238,6 @@ secret_nagios_matrix_token: !vault |
|
|||||||
6433376138386531380a383762393137613738643538343438633730313135613730613139393536
|
6433376138386531380a383762393137613738643538343438633730313135613730613139393536
|
||||||
35666133666262383862663637623738643836383633653864626231623034613662646563623936
|
35666133666262383862663637623738643836383633653864626231623034613662646563623936
|
||||||
3763356331333561383833386162616664376335333139376363
|
3763356331333561383833386162616664376335333139376363
|
||||||
nagios_contacts:
|
|
||||||
- name: matrix
|
|
||||||
host_notification_commands: notify-host-by-matrix
|
|
||||||
service_notification_commands: notify-service-by-matrix
|
|
||||||
host_notification_period: ansible-not-late-at-night
|
|
||||||
service_notification_period: ansible-not-late-at-night
|
|
||||||
extra:
|
|
||||||
- key: contactgroups
|
|
||||||
value: ansible
|
|
||||||
- name: salt
|
|
||||||
host_notification_commands: notify-host-by-email
|
|
||||||
service_notification_commands: notify-service-by-email
|
|
||||||
extra:
|
|
||||||
- key: email
|
|
||||||
value: alerts@babor.tech
|
|
||||||
nagios_commands:
|
|
||||||
# This command is included in the container image
|
|
||||||
- name: check_nrpe
|
|
||||||
command: "$USER1$/check_nrpe -H $HOSTADDRESS$ -c $ARG1$"
|
|
||||||
- name: check_by_ssh
|
|
||||||
command: "$USER1$/check_by_ssh -H $HOSTADDRESS$ -F /opt/nagios/etc/ssh_config -t 30 -q -i /opt/nagios/etc/id_ed25519 -l nagios-checker -C \"$ARG1$\""
|
|
||||||
- name: notify-host-by-matrix
|
|
||||||
command: "/usr/bin/printf \"%b\" \"$NOTIFICATIONTYPE$\\n$HOSTNAME$ is $HOSTSTATE$\\nAddress: $HOSTADDRESS$\\nInfo: $HOSTOUTPUT$\\nDate/Time: $LONGDATETIME$\" | /opt/Custom-Nagios-Plugins/notify-by-matrix"
|
|
||||||
- name: notify-service-by-matrix
|
|
||||||
command: "/usr/bin/printf \"%b\" \"$NOTIFICATIONTYPE$\\nService $HOSTALIAS$ - $SERVICEDESC$ is $SERVICESTATE$\\nInfo: $SERVICEOUTPUT$\\nDate/Time: $LONGDATETIME$\" | /opt/Custom-Nagios-Plugins/notify-by-matrix"
|
|
||||||
nagios_services:
|
|
||||||
# check_by_ssh checks
|
|
||||||
- name: Last Ansible Play
|
|
||||||
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_file_age /var/lib/ansible-last-run -w 432000 -c 604800
|
|
||||||
- name: Reboot Required
|
|
||||||
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_reboot_required
|
|
||||||
- name: Unit backup.service
|
|
||||||
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit backup.service
|
|
||||||
hostgroup: "ansible,!role-hypervisor"
|
|
||||||
- name: Unit backup.timer
|
|
||||||
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit backup.timer
|
|
||||||
hostgroup: "ansible,!role-hypervisor"
|
|
||||||
# Tag-specific checks
|
|
||||||
# zerotier
|
|
||||||
- name: Unit zerotier-one.service
|
|
||||||
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit zerotier-one.service
|
|
||||||
hostgroup: tag-zt-personal
|
|
||||||
|
|
||||||
# For Netbox
|
# For Netbox
|
||||||
secret_netbox_user_pass: !vault |
|
secret_netbox_user_pass: !vault |
|
||||||
|
@ -26,3 +26,11 @@ secret_keepalived_pass: !vault |
|
|||||||
64613934346234316564613564363863356663653063333432316434353633333138643561316638
|
64613934346234316564613564363863356663653063333432316434353633333138643561316638
|
||||||
6563386233656364310a626363663234623161363537323035663663383333353138386239623934
|
6563386233656364310a626363663234623161363537323035663663383333353138386239623934
|
||||||
65613231666661633262633439393462316337393532623263363630353133373236
|
65613231666661633262633439393462316337393532623263363630353133373236
|
||||||
|
secret_firefly_db_pass: !vault |
|
||||||
|
$ANSIBLE_VAULT;1.1;AES256
|
||||||
|
31656262333131613762373430323032663634316133346661333762323631323931633633623666
|
||||||
|
6665373939396238383965653635653039336635313361350a333133303239323262383938303436
|
||||||
|
64396137343737346362646330323662333731376332306663336638333161313835626261343031
|
||||||
|
3165643531336534650a393237623435663566346332313838616137343831643030333230356230
|
||||||
|
65386234316565666465376538333661623938326234323136303764376239326135
|
||||||
|
|
||||||
|
@ -1,12 +0,0 @@
|
|||||||
#!/usr/bin/env ansible-playbook
|
|
||||||
# vim:ft=ansible:
|
|
||||||
---
|
|
||||||
# Home desktops
|
|
||||||
- hosts: localhost
|
|
||||||
roles:
|
|
||||||
- role: backup
|
|
||||||
vars:
|
|
||||||
backup_s3backup_tar_args_extra: h
|
|
||||||
backup_s3backup_list_extra:
|
|
||||||
- /home/salt/.backup/
|
|
||||||
tags: [ backup ]
|
|
@ -21,3 +21,9 @@
|
|||||||
- hosts: dsk-ryzen-1.ws.mgmt.desu.ltd
|
- hosts: dsk-ryzen-1.ws.mgmt.desu.ltd
|
||||||
roles:
|
roles:
|
||||||
- role: desktop
|
- role: desktop
|
||||||
|
- role: backup
|
||||||
|
vars:
|
||||||
|
backup_s3backup_tar_args_extra: h
|
||||||
|
backup_s3backup_list_extra:
|
||||||
|
- /home/salt/.backup/
|
||||||
|
tags: [ backup ]
|
@ -92,9 +92,11 @@
|
|||||||
value: [ "{{ item.value }}" ]
|
value: [ "{{ item.value }}" ]
|
||||||
with_items:
|
with_items:
|
||||||
# Public
|
# Public
|
||||||
- record: git.desu.ltd
|
- record: firefly.desu.ltd
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
- record: grafana.desu.ltd
|
- record: firefly-importer.desu.ltd
|
||||||
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
|
- record: git.desu.ltd
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
- record: matrix.desu.ltd
|
- record: matrix.desu.ltd
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
@ -106,13 +108,7 @@
|
|||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
- record: netbox.desu.ltd
|
- record: netbox.desu.ltd
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
- record: prometheus.desu.ltd
|
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
|
||||||
# Public media stuff
|
# Public media stuff
|
||||||
- record: music.desu.ltd
|
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
|
||||||
- record: lidarr.media.desu.ltd
|
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
|
||||||
- record: prowlarr.media.desu.ltd
|
- record: prowlarr.media.desu.ltd
|
||||||
value: vm-general-1.ashburn.mgmt.desu.ltd
|
value: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
- record: sonarr.media.desu.ltd
|
- record: sonarr.media.desu.ltd
|
||||||
|
@ -3,19 +3,6 @@
|
|||||||
# Database servers
|
# Database servers
|
||||||
---
|
---
|
||||||
- hosts: vm-general-1.ashburn.mgmt.desu.ltd
|
- hosts: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
tasks:
|
|
||||||
- name: assure prometheus psql exporter
|
|
||||||
ansible.builtin.docker_container:
|
|
||||||
name: prometheus-psql-exporter
|
|
||||||
image: quay.io/prometheuscommunity/postgres-exporter
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
env:
|
|
||||||
DATA_SOURCE_URI: "10.0.0.2:5432/postgres"
|
|
||||||
DATA_SOURCE_USER: "nagios"
|
|
||||||
DATA_SOURCE_PASS: "{{ secret_postgresql_monitoring_password }}"
|
|
||||||
ports:
|
|
||||||
- 9102:9187/tcp
|
|
||||||
tags: [ db, psql, prometheus, monitoring, docker ]
|
|
||||||
roles:
|
roles:
|
||||||
- role: geerlingguy.postgresql
|
- role: geerlingguy.postgresql
|
||||||
vars:
|
vars:
|
||||||
@ -38,25 +25,49 @@
|
|||||||
# Used for internal access from Docker
|
# Used for internal access from Docker
|
||||||
- { type: host, database: all, user: all, address: '172.16.0.0/12', auth_method: md5 }
|
- { type: host, database: all, user: all, address: '172.16.0.0/12', auth_method: md5 }
|
||||||
postgresql_users:
|
postgresql_users:
|
||||||
|
- name: ara-desultd
|
||||||
|
password: "{{ secret_ara_db_pass }}"
|
||||||
|
- name: firefly-desultd
|
||||||
|
password: "{{ secret_firefly_db_pass }}"
|
||||||
- name: gitea-desultd
|
- name: gitea-desultd
|
||||||
password: "{{ secret_gitea_db_pass }}"
|
password: "{{ secret_gitea_db_pass }}"
|
||||||
|
- name: gitlab-desultd
|
||||||
|
password: "{{ secret_gitlab_db_pass }}"
|
||||||
- name: nagios
|
- name: nagios
|
||||||
password: "{{ secret_postgresql_monitoring_password }}"
|
password: "{{ secret_postgresql_monitoring_password }}"
|
||||||
- name: netbox-desultd
|
- name: netbox-desultd
|
||||||
password: "{{ secret_netbox_db_pass }}"
|
password: "{{ secret_netbox_db_pass }}"
|
||||||
- name: nextcloud-desultd
|
- name: nextcloud-desultd
|
||||||
password: "{{ secret_nextcloud_db_pass }}"
|
password: "{{ secret_nextcloud_db_pass }}"
|
||||||
|
- name: peertube-cowfee
|
||||||
|
password: "{{ secret_peertube_db_pass }}"
|
||||||
|
- name: pleroma-cowfee
|
||||||
|
password: "{{ secret_pleroma_9iron_db_pass }}"
|
||||||
- name: synapse-desultd
|
- name: synapse-desultd
|
||||||
password: "{{ secret_synapse_db_pass }}"
|
password: "{{ secret_synapse_db_pass }}"
|
||||||
|
- name: vaultwarden-desultd
|
||||||
|
password: "{{ secret_vaultwarden_db_pass }}"
|
||||||
postgresql_databases:
|
postgresql_databases:
|
||||||
|
- name: ara-desultd
|
||||||
|
owner: ara-desultd
|
||||||
|
- name: firefly-desultd
|
||||||
|
owner: firefly-desultd
|
||||||
- name: gitea-desultd
|
- name: gitea-desultd
|
||||||
owner: gitea-desultd
|
owner: gitea-desultd
|
||||||
|
- name: gitlab-desultd
|
||||||
|
owner: gitlab-desultd
|
||||||
- name: netbox-desultd
|
- name: netbox-desultd
|
||||||
owner: netbox-desultd
|
owner: netbox-desultd
|
||||||
- name: nextcloud-desultd
|
- name: nextcloud-desultd
|
||||||
owner: nextcloud-desultd
|
owner: nextcloud-desultd
|
||||||
|
- name: pleroma_cowfee
|
||||||
|
owner: pleroma-cowfee
|
||||||
|
- name: peertube
|
||||||
|
owner: peertube-cowfee
|
||||||
- name: synapse-desultd
|
- name: synapse-desultd
|
||||||
lc_collate: C
|
lc_collate: C
|
||||||
lc_ctype: C
|
lc_ctype: C
|
||||||
owner: synapse-desultd
|
owner: synapse-desultd
|
||||||
|
- name: vaultwarden-desultd
|
||||||
|
owner: vaultwarden-desultd
|
||||||
tags: [ db, psql ]
|
tags: [ db, psql ]
|
||||||
|
@ -16,33 +16,25 @@
|
|||||||
- name: include tasks for applications
|
- name: include tasks for applications
|
||||||
include_tasks: tasks/{{ item }}
|
include_tasks: tasks/{{ item }}
|
||||||
with_items:
|
with_items:
|
||||||
# Applications
|
|
||||||
- app/gitlab-runner.yml
|
- app/gitlab-runner.yml
|
||||||
- app/redis.yml
|
- app/redis.yml
|
||||||
# Frontend web services
|
|
||||||
- web/9iron.yml
|
- web/9iron.yml
|
||||||
- web/desultd.yml
|
- web/desultd.yml
|
||||||
- web/element-web.yml
|
- web/element-web.yml
|
||||||
|
- web/firefly-iii.yml
|
||||||
- web/gitea.yml
|
- web/gitea.yml
|
||||||
- web/grafana.yml
|
|
||||||
- web/netbox.yml
|
- web/netbox.yml
|
||||||
- web/nextcloud.yml
|
- web/nextcloud.yml
|
||||||
- web/synapse.yml
|
|
||||||
# Backend web services
|
|
||||||
- web/lidarr.yml
|
|
||||||
- web/navidrome.yml
|
|
||||||
- web/prowlarr.yml
|
- web/prowlarr.yml
|
||||||
- web/radarr.yml
|
- web/radarr.yml
|
||||||
- web/sonarr.yml
|
- web/sonarr.yml
|
||||||
- web/srv.yml
|
- web/srv.yml
|
||||||
|
- web/synapse.yml
|
||||||
- web/transmission.yml
|
- web/transmission.yml
|
||||||
# Games
|
|
||||||
- game/factorio.yml
|
- game/factorio.yml
|
||||||
- game/minecraft-createfarming.yml
|
- game/minecraft-vanilla.yml
|
||||||
- game/minecraft-magicpack.yml
|
- game/minecraft-direwolf20.yml
|
||||||
- game/minecraft-weedie.yml
|
|
||||||
- game/zomboid.yml
|
- game/zomboid.yml
|
||||||
- game/satisfactory.yml
|
|
||||||
tags: [ always ]
|
tags: [ always ]
|
||||||
roles:
|
roles:
|
||||||
- role: backup
|
- role: backup
|
||||||
@ -51,39 +43,184 @@
|
|||||||
- /app/gitea/gitea
|
- /app/gitea/gitea
|
||||||
- /data
|
- /data
|
||||||
backup_s3backup_exclude_list_extra:
|
backup_s3backup_exclude_list_extra:
|
||||||
- /data/minecraft/magicpack/backups
|
- /var/lib/gitea/log
|
||||||
|
- /data/gitea/data/gitea/log
|
||||||
|
- /data/minecraft/oldpack/backups
|
||||||
|
- /data/minecraft/stoneblock/backups
|
||||||
|
- /data/minecraft/create-extra/backups
|
||||||
- /data/minecraft/direwolf20/backups
|
- /data/minecraft/direwolf20/backups
|
||||||
- /data/minecraft/weedie/backups
|
- /data/minecraft/prominence/FeedTheBeast/world/.git
|
||||||
|
- /data/sb-mirror
|
||||||
- /data/shared/media
|
- /data/shared/media
|
||||||
- /data/shared/downloads
|
- /data/shared/downloads
|
||||||
- /data/zomboid/ZomboidDedicatedServer/steamapps/workshop
|
- /data/terraria/generic/backups
|
||||||
tags: [ backup ]
|
tags: [ backup ]
|
||||||
|
# - role: docker-tmodloader14
|
||||||
|
# tags: [ terraria, tmodloader ]
|
||||||
|
# - role: docker-tmodloader14
|
||||||
|
# vars:
|
||||||
|
# tmodloader_external_port: "7778"
|
||||||
|
# tmodloader_name: "test"
|
||||||
|
# tags: [ terraria-test, tmodloader-test ]
|
||||||
- role: git
|
- role: git
|
||||||
vars:
|
vars:
|
||||||
git_repos:
|
git_repos:
|
||||||
- repo: https://git.desu.ltd/salt/gitea-custom
|
- repo: https://git.desu.ltd/salt/gitea-custom
|
||||||
dest: /data/gitea/data/gitea/custom
|
dest: /data/gitea/data/gitea/custom
|
||||||
tags: [ web, git ]
|
tags: [ web, git ]
|
||||||
- role: prometheus
|
|
||||||
tags: [ prometheus, monitoring, no-test ]
|
|
||||||
- role: nagios
|
- role: nagios
|
||||||
vars:
|
vars:
|
||||||
# Definitions for contacts and checks are defined in inventory vars
|
|
||||||
# See group_vars/all.yml if you need to change those
|
|
||||||
nagios_matrix_server: "https://matrix.desu.ltd"
|
nagios_matrix_server: "https://matrix.desu.ltd"
|
||||||
nagios_matrix_room: "!NWNCKlNmOTcarMcMIh:desu.ltd"
|
nagios_matrix_room: "!NWNCKlNmOTcarMcMIh:desu.ltd"
|
||||||
nagios_matrix_token: "{{ secret_nagios_matrix_token }}"
|
nagios_matrix_token: "{{ secret_nagios_matrix_token }}"
|
||||||
nagios_data_dir: /data/nagios
|
nagios_data_dir: /data/nagios
|
||||||
nagios_admin_pass: "{{ secret_nagios_admin_pass }}"
|
nagios_admin_pass: "{{ secret_nagios_admin_pass }}"
|
||||||
|
nagios_contacts:
|
||||||
|
- name: matrix
|
||||||
|
host_notification_commands: notify-host-by-matrix
|
||||||
|
service_notification_commands: notify-service-by-matrix
|
||||||
|
host_notification_period: ansible-not-late-at-night
|
||||||
|
service_notification_period: ansible-not-late-at-night
|
||||||
|
extra:
|
||||||
|
- key: contactgroups
|
||||||
|
value: ansible
|
||||||
|
- name: salt
|
||||||
|
host_notification_commands: notify-host-by-email
|
||||||
|
service_notification_commands: notify-service-by-email
|
||||||
|
extra:
|
||||||
|
- key: email
|
||||||
|
value: alerts@babor.tech
|
||||||
|
nagios_commands:
|
||||||
|
# This command is included in the container image
|
||||||
|
- name: check_nrpe
|
||||||
|
command: "$USER1$/check_nrpe -H $HOSTADDRESS$ -c $ARG1$"
|
||||||
|
- name: check_by_ssh
|
||||||
|
command: "$USER1$/check_by_ssh -H $HOSTADDRESS$ -F /opt/nagios/etc/ssh_config -t 30 -q -i /opt/nagios/etc/id_ed25519 -l nagios-checker -C \"$ARG1$\""
|
||||||
|
- name: notify-host-by-matrix
|
||||||
|
command: "/usr/bin/printf \"%b\" \"$NOTIFICATIONTYPE$\\n$HOSTNAME$ is $HOSTSTATE$\\nAddress: $HOSTADDRESS$\\nInfo: $HOSTOUTPUT$\\nDate/Time: $LONGDATETIME$\" | /opt/Custom-Nagios-Plugins/notify-by-matrix"
|
||||||
|
- name: notify-service-by-matrix
|
||||||
|
command: "/usr/bin/printf \"%b\" \"$NOTIFICATIONTYPE$\\nService $HOSTALIAS$ - $SERVICEDESC$ is $SERVICESTATE$\\nInfo: $SERVICEOUTPUT$\\nDate/Time: $LONGDATETIME$\" | /opt/Custom-Nagios-Plugins/notify-by-matrix"
|
||||||
|
nagios_services:
|
||||||
|
# Agentless checks
|
||||||
|
- name: HTTP
|
||||||
|
command: check_http
|
||||||
|
hostgroup: tag-nagios-checkhttp
|
||||||
|
- name: HTTPS
|
||||||
|
command: check_http!--ssl
|
||||||
|
hostgroup: tag-nagios-checkhttp
|
||||||
|
- name: SSH
|
||||||
|
command: check_ssh
|
||||||
|
# check_by_ssh checks
|
||||||
|
- name: CPU Utilization
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_cpu_stats -w 75 -c 90
|
||||||
|
- name: DNS Resolution
|
||||||
|
command: check_by_ssh!/usr/lib/nagios/plugins/check_etc_resolv
|
||||||
|
- name: Executables in tmp
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_executables_in_tmp
|
||||||
|
- name: Last Ansible Play
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_file_age /var/lib/ansible-last-run -w 432000 -c 604800
|
||||||
|
- name: Memory Usage
|
||||||
|
command: check_by_ssh!/usr/lib/nagios/plugins/check_memory -w 10% -c 5%
|
||||||
|
hostgroup: "ansible,!tag-prov-zfs"
|
||||||
|
- name: Ping Self over DNS
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_ping_by_hostname
|
||||||
|
- name: Reboot Required
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_reboot_required
|
||||||
|
- name: Unit atd.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit atd.service
|
||||||
|
- name: Unit backup.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit backup.service
|
||||||
|
hostgroup: "ansible,!role-hypervisor"
|
||||||
|
- name: Unit backup.timer
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit backup.timer
|
||||||
|
hostgroup: "ansible,!role-hypervisor"
|
||||||
|
- name: Unit cron.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit cron.service
|
||||||
|
- name: Unit dbus.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit dbus.service
|
||||||
|
- name: Unit ssh.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit ssh.service
|
||||||
|
- name: Unit systemd-resolved.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit systemd-resolved.service
|
||||||
|
hostgroup: "ansible,!role-hypervisor"
|
||||||
|
- name: Users
|
||||||
|
command: check_by_ssh!/usr/lib/nagios/plugins/check_users -w 3 -c 5
|
||||||
|
# Privileged checks
|
||||||
|
# Required because check_disk may attempt to get the free space of
|
||||||
|
# restricted mountpoints
|
||||||
|
- name: Disk Usage
|
||||||
|
command: check_by_ssh!/usr/bin/sudo /usr/lib/nagios/plugins/check_disk -M -u GB -X nfs -X tracefs -X cgroup -X tmpfs -X overlay -X shm -w 15% -c 10% -W 15% -K 10% -A -I '^/run/' -I '^udev$' -I '^/var/lib/kubelet/' -I '^/tmp/.mount_' -I '^/dev/loop'
|
||||||
|
# Device type checks
|
||||||
|
# R720
|
||||||
|
- name: CPU0 Temperature
|
||||||
|
command: check_by_ssh!/usr/bin/sudo /usr/local/bin/monitoring-scripts/check_temp -n -w 65 -c 75 --sensor coretemp-isa-0000
|
||||||
|
hostgroup: device-type-r720
|
||||||
|
- name: CPU1 Temperature
|
||||||
|
command: check_by_ssh!/usr/bin/sudo /usr/local/bin/monitoring-scripts/check_temp -n -w 65 -c 75 --sensor coretemp-isa-0001
|
||||||
|
hostgroup: device-type-r720
|
||||||
|
# Pi 4 4G
|
||||||
|
- name: CPU Temperature
|
||||||
|
command: check_by_ssh!/usr/bin/sudo /usr/local/bin/monitoring-scripts/check_temp -n -w 65 -c 75 --sensor cpu_thermal-virtual-0
|
||||||
|
hostgroup: device-type-pi4b-2g,device-type-pi4b-4g,device-type-pi4b-4g-storage
|
||||||
|
# Device role checks
|
||||||
|
# hypervisor (which is assumed to be Proxmox)
|
||||||
|
- name: PVE Unit pve-firewall.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pve-firewall.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit spiceproxy.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit spiceproxy.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pve-ha-crm.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pve-ha-crm.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pvedaemon.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pvedaemon.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pvefw-logger.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pvefw-logger.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pveproxy.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pveproxy.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pve-cluster.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pve-cluster.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
- name: PVE Unit pvestatd.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit pvestatd.service
|
||||||
|
hostgroup: role-hypervisor
|
||||||
|
# Tag-specific checks
|
||||||
|
# docker
|
||||||
|
- name: Unit docker.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit docker.service
|
||||||
|
hostgroup: "ansible,!tag-no-docker"
|
||||||
|
- name: Docker Status
|
||||||
|
command: check_by_ssh!/usr/bin/sudo /usr/local/bin/monitoring-scripts/check_docker --no-ok --status running
|
||||||
|
hostgroup: tag-nagios-checkdocker
|
||||||
|
# nagios-checkpgsql
|
||||||
|
- name: PSQL
|
||||||
|
command: "check_by_ssh!/usr/lib/nagios/plugins/check_pgsql -H localhost -l nagios -p {{ secret_postgresql_monitoring_password }} -w 2 -c 5"
|
||||||
|
hostgroup: tag-nagios-checkpgsql
|
||||||
|
- name: PSQL Connections
|
||||||
|
command: "check_by_ssh!/usr/lib/nagios/plugins/check_pgsql -H localhost -l nagios -p {{ secret_postgresql_monitoring_password }} -w 2 -c 5 -q 'select (select count(*)::float used from pg_stat_activity) / (select setting::int max_conn from pg_settings where name=\\$\\$max_connections\\$\\$)' -W 0.7-0.8 -C 0.8-1.0"
|
||||||
|
hostgroup: tag-nagios-checkpgsql
|
||||||
|
# https://rhaas.blogspot.com/2020/02/useless-vacuuming.html
|
||||||
|
- name: PSQL Old Xacts
|
||||||
|
command: "check_by_ssh!/usr/lib/nagios/plugins/check_pgsql -H localhost -l nagios -p {{ secret_postgresql_monitoring_password }} -w 2 -c 5 -q 'select count(*)::float from pg_prepared_xacts where age(transaction) > 5000000' -W 500-1000 -C 1000-1000000"
|
||||||
|
hostgroup: tag-nagios-checkpgsql
|
||||||
|
- name: Unit postgresql.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit postgresql.service
|
||||||
|
hostgroup: tag-nagios-checkpgsql
|
||||||
|
# nagios-checkswap
|
||||||
|
- name: Swap Usage
|
||||||
|
command: check_by_ssh!/usr/lib/nagios/plugins/check_swap -w 20% -c 10%
|
||||||
|
hostgroup: tag-nagios-checkswap
|
||||||
|
# zerotier
|
||||||
|
- name: Unit zerotier-one.service
|
||||||
|
command: check_by_ssh!/usr/local/bin/monitoring-scripts/check_systemd_unit zerotier-one.service
|
||||||
|
hostgroup: tag-zt-personal
|
||||||
tags: [ nagios, no-auto ]
|
tags: [ nagios, no-auto ]
|
||||||
- role: ingress
|
- role: ingress
|
||||||
vars:
|
vars:
|
||||||
ingress_head: |
|
|
||||||
# Used by Grafana, required for its API or some shit
|
|
||||||
map $http_upgrade $connection_upgrade {
|
|
||||||
default upgrade;
|
|
||||||
'' close;
|
|
||||||
}
|
|
||||||
ingress_servers:
|
ingress_servers:
|
||||||
# desu.ltd
|
# desu.ltd
|
||||||
- name: desu.ltd
|
- name: desu.ltd
|
||||||
@ -97,18 +234,15 @@
|
|||||||
contents: |
|
contents: |
|
||||||
default_type application/json;
|
default_type application/json;
|
||||||
return 200 '{"m.homeserver":{"base_url":"https://matrix.desu.ltd"}}';
|
return 200 '{"m.homeserver":{"base_url":"https://matrix.desu.ltd"}}';
|
||||||
|
- name: firefly.desu.ltd
|
||||||
|
proxy_pass: http://firefly:8080
|
||||||
|
- name: firefly-importer.desu.ltd
|
||||||
|
directives:
|
||||||
|
- "allow {{ common_home_address }}/{{ common_home_address_mask }}"
|
||||||
|
- "deny all"
|
||||||
|
proxy_pass: http://firefly-importer:8080
|
||||||
- name: git.desu.ltd
|
- name: git.desu.ltd
|
||||||
proxy_pass: http://gitea:3000
|
proxy_pass: http://gitea:3000
|
||||||
- name: grafana.desu.ltd
|
|
||||||
proxy_pass: http://grafana:3000
|
|
||||||
locations:
|
|
||||||
- location: "/api/live/"
|
|
||||||
contents: |
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection $connection_upgrade;
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_pass http://grafana:3000;
|
|
||||||
- name: matrix.desu.ltd
|
- name: matrix.desu.ltd
|
||||||
proxies:
|
proxies:
|
||||||
- location: "~* ^(\/_matrix|\/_synapse|\/client|\/health)"
|
- location: "~* ^(\/_matrix|\/_synapse|\/client|\/health)"
|
||||||
@ -117,12 +251,12 @@
|
|||||||
pass: http://element:80
|
pass: http://element:80
|
||||||
directives:
|
directives:
|
||||||
- "client_max_body_size 0"
|
- "client_max_body_size 0"
|
||||||
|
- name: nagios.desu.ltd
|
||||||
|
proxy_pass: http://nagios:80
|
||||||
- name: nc.desu.ltd
|
- name: nc.desu.ltd
|
||||||
directives:
|
directives:
|
||||||
- "add_header Strict-Transport-Security \"max-age=31536000\""
|
- "add_header Strict-Transport-Security \"max-age=31536000\""
|
||||||
- "client_max_body_size 0"
|
- "client_max_body_size 0"
|
||||||
- "keepalive_requests 99999"
|
|
||||||
- "keepalive_timeout 600"
|
|
||||||
proxy_pass: http://nextcloud:80
|
proxy_pass: http://nextcloud:80
|
||||||
locations:
|
locations:
|
||||||
- location: "^~ /.well-known"
|
- location: "^~ /.well-known"
|
||||||
@ -133,16 +267,6 @@
|
|||||||
try_files $uri $uri/ =404;
|
try_files $uri $uri/ =404;
|
||||||
- name: netbox.desu.ltd
|
- name: netbox.desu.ltd
|
||||||
proxy_pass: http://netbox:8080
|
proxy_pass: http://netbox:8080
|
||||||
- name: prometheus.desu.ltd
|
|
||||||
directives:
|
|
||||||
- "allow {{ common_home_address }}/{{ common_home_address_mask }}"
|
|
||||||
- "allow 10.0.0.0/8"
|
|
||||||
- "allow 172.16.0.0/12"
|
|
||||||
- "allow 192.168.0.0/16"
|
|
||||||
# TODO: Replace this with a dynamically-generated list of public IPs from inv
|
|
||||||
- "allow 45.79.58.44/32" # bastion1.dallas.mgmt.desu.ltd
|
|
||||||
- "deny all"
|
|
||||||
proxy_pass: http://prometheus:9090
|
|
||||||
# desu.ltd media bullshit
|
# desu.ltd media bullshit
|
||||||
- name: prowlarr.media.desu.ltd
|
- name: prowlarr.media.desu.ltd
|
||||||
directives:
|
directives:
|
||||||
|
@ -3,11 +3,34 @@
|
|||||||
---
|
---
|
||||||
- hosts: tags_autoreboot
|
- hosts: tags_autoreboot
|
||||||
gather_facts: no
|
gather_facts: no
|
||||||
|
module_defaults:
|
||||||
|
nagios:
|
||||||
|
author: Ansible
|
||||||
|
action: downtime
|
||||||
|
cmdfile: /data/nagios/var/rw/nagios.cmd
|
||||||
|
comment: "Ansible tags_autoreboot task"
|
||||||
|
host: "{{ inventory_hostname }}"
|
||||||
|
minutes: 10
|
||||||
serial: 1
|
serial: 1
|
||||||
tasks:
|
tasks:
|
||||||
- name: check for reboot-required
|
- name: check for reboot-required
|
||||||
ansible.builtin.stat: path=/var/run/reboot-required
|
ansible.builtin.stat: path=/var/run/reboot-required
|
||||||
register: s
|
register: s
|
||||||
- name: reboot
|
- name: reboot
|
||||||
ansible.builtin.reboot: reboot_timeout=600
|
block:
|
||||||
|
- name: attempt to schedule downtime
|
||||||
|
block:
|
||||||
|
- name: register nagios host downtime
|
||||||
|
nagios:
|
||||||
|
service: host
|
||||||
|
delegate_to: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
|
- name: register nagios service downtime
|
||||||
|
nagios:
|
||||||
|
service: all
|
||||||
|
delegate_to: vm-general-1.ashburn.mgmt.desu.ltd
|
||||||
|
rescue:
|
||||||
|
- name: notify of failure to reboot
|
||||||
|
ansible.builtin.debug: msg="Miscellaneous failure when scheduling downtime"
|
||||||
|
- name: reboot
|
||||||
|
ansible.builtin.reboot: reboot_timeout=600
|
||||||
when: s.stat.exists
|
when: s.stat.exists
|
||||||
|
@ -2,65 +2,39 @@
|
|||||||
# vim:ft=ansible:
|
# vim:ft=ansible:
|
||||||
---
|
---
|
||||||
- hosts: tags_nagios
|
- hosts: tags_nagios
|
||||||
gather_facts: yes
|
gather_facts: no
|
||||||
|
roles:
|
||||||
|
- role: git
|
||||||
|
vars:
|
||||||
|
git_repos:
|
||||||
|
- repo: https://git.desu.ltd/salt/monitoring-scripts
|
||||||
|
dest: /usr/local/bin/monitoring-scripts
|
||||||
|
tags: [ nagios, git ]
|
||||||
tasks:
|
tasks:
|
||||||
|
- name: assure nagios plugin packages
|
||||||
|
ansible.builtin.apt: name=monitoring-plugins,nagios-plugins-contrib
|
||||||
|
tags: [ nagios ]
|
||||||
- name: assure nagios user
|
- name: assure nagios user
|
||||||
ansible.builtin.user: name=nagios-checker state=absent remove=yes
|
ansible.builtin.user: name=nagios-checker state=present system=yes
|
||||||
|
tags: [ nagios ]
|
||||||
|
- name: assure nagios user ssh key
|
||||||
|
authorized_key:
|
||||||
|
user: nagios-checker
|
||||||
|
state: present
|
||||||
|
key: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKNavw28C0mKIQVRLQDW2aoovliU1XCGaenDhIMwumK/ Nagios monitoring"
|
||||||
tags: [ nagios ]
|
tags: [ nagios ]
|
||||||
- name: assure nagios user sudo rule file
|
- name: assure nagios user sudo rule file
|
||||||
ansible.builtin.file: path=/etc/sudoers.d/50-nagios-checker state=absent
|
ansible.builtin.file: path=/etc/sudoers.d/50-nagios-checker mode=0750 owner=root group=root state=touch modification_time=preserve access_time=preserve
|
||||||
|
tags: [ nagios, sudo ]
|
||||||
|
- name: assure nagios user sudo rules
|
||||||
|
ansible.builtin.lineinfile:
|
||||||
|
path: /etc/sudoers.d/50-nagios-checker
|
||||||
|
line: "nagios-checker ALL = (root) NOPASSWD: {{ item }}"
|
||||||
|
with_items:
|
||||||
|
- /usr/lib/nagios/plugins/check_disk
|
||||||
|
- /usr/local/bin/monitoring-scripts/check_docker
|
||||||
|
- /usr/local/bin/monitoring-scripts/check_temp
|
||||||
tags: [ nagios, sudo ]
|
tags: [ nagios, sudo ]
|
||||||
- name: assure prometheus containers for docker hosts
|
|
||||||
block:
|
|
||||||
- name: assure prometheus node exporter
|
|
||||||
# https://github.com/prometheus/node_exporter
|
|
||||||
ansible.builtin.docker_container:
|
|
||||||
name: prometheus-node-exporter
|
|
||||||
image: quay.io/prometheus/node-exporter:latest
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
command:
|
|
||||||
- '--path.rootfs=/host'
|
|
||||||
- '--collector.interrupts'
|
|
||||||
- '--collector.processes'
|
|
||||||
network_mode: host
|
|
||||||
pid_mode: host
|
|
||||||
volumes:
|
|
||||||
- /:/host:ro,rslave
|
|
||||||
tags: [ prometheus ]
|
|
||||||
- name: assure prometheus cadvisor exporter
|
|
||||||
ansible.builtin.docker_container:
|
|
||||||
name: prometheus-cadvisor-exporter
|
|
||||||
image: gcr.io/cadvisor/cadvisor:latest
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
ports:
|
|
||||||
- 9101:8080/tcp
|
|
||||||
volumes:
|
|
||||||
- /:/rootfs:ro
|
|
||||||
- /var/run:/var/run:ro
|
|
||||||
- /sys:/sys:ro
|
|
||||||
- /var/lib/docker:/var/lib/docker:ro
|
|
||||||
- /dev/disk:/dev/disk:ro
|
|
||||||
devices:
|
|
||||||
- /dev/kmsg
|
|
||||||
when: ansible_pkg_mgr != "atomic_container"
|
|
||||||
- name: assure prometheus containers for coreos
|
|
||||||
block:
|
|
||||||
- name: assure prometheus node exporter
|
|
||||||
# https://github.com/prometheus/node_exporter
|
|
||||||
containers.podman.podman_container:
|
|
||||||
name: prometheus-node-exporter
|
|
||||||
image: quay.io/prometheus/node-exporter:latest
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
command:
|
|
||||||
- '--path.rootfs=/host'
|
|
||||||
- '--collector.interrupts'
|
|
||||||
- '--collector.processes'
|
|
||||||
network_mode: host
|
|
||||||
pid_mode: host
|
|
||||||
volumes:
|
|
||||||
- /:/host:ro,rslave
|
|
||||||
tags: [ prometheus ]
|
|
||||||
when: ansible_pkg_mgr == "atomic_container"
|
|
||||||
- hosts: all
|
- hosts: all
|
||||||
gather_facts: no
|
gather_facts: no
|
||||||
tasks:
|
tasks:
|
||||||
|
@ -2,38 +2,20 @@
|
|||||||
- name: docker deploy minecraft - create farming and delights
|
- name: docker deploy minecraft - create farming and delights
|
||||||
docker_container:
|
docker_container:
|
||||||
name: minecraft-createfarming
|
name: minecraft-createfarming
|
||||||
state: absent
|
state: started
|
||||||
image: itzg/minecraft-server:latest
|
image: itzg/minecraft-server:latest
|
||||||
|
restart_policy: unless-stopped
|
||||||
|
pull: yes
|
||||||
env:
|
env:
|
||||||
# Common envvars
|
|
||||||
EULA: "true"
|
EULA: "true"
|
||||||
OPS: "VintageSalt"
|
|
||||||
SNOOPER_ENABLED: "false"
|
|
||||||
SPAWN_PROTECTION: "0"
|
|
||||||
USE_AIKAR_FLAGS: "true"
|
|
||||||
RCON_CMDS_STARTUP: |-
|
|
||||||
scoreboard objectives add Deaths deathCount
|
|
||||||
#scoreboard objectives add Health health {"text":"❤","color":"red"}
|
|
||||||
RCON_CMDS_ON_CONNECT: |-
|
|
||||||
scoreboard objectives setdisplay list Deaths
|
|
||||||
#scoreboard objectives setdisplay belowName Health
|
|
||||||
# Pack-specific stuff
|
|
||||||
MODRINTH_PROJECT: "https://modrinth.com/modpack/create-farmersdelight/version/1.0.0"
|
MODRINTH_PROJECT: "https://modrinth.com/modpack/create-farmersdelight/version/1.0.0"
|
||||||
MOTD: "Create Farming and Delights! Spinny trains!"
|
|
||||||
TYPE: "MODRINTH"
|
TYPE: "MODRINTH"
|
||||||
VERSION: "1.20.1"
|
VERSION: "1.20.1"
|
||||||
MAX_MEMORY: "6G"
|
MAX_MEMORY: "6G"
|
||||||
#VIEW_DISTANCE: "10"
|
|
||||||
ports:
|
ports:
|
||||||
- "25565:25565/tcp"
|
- "25565:25565/tcp"
|
||||||
- "25565:25565/udp"
|
- "25565:25565/udp"
|
||||||
- "24454:24454/udp"
|
- "24454:24454/udp"
|
||||||
# Prometheus exporter for Forge
|
|
||||||
# https://www.curseforge.com/minecraft/mc-mods/prometheus-exporter
|
|
||||||
#- "19565:19565/tcp"
|
|
||||||
# Prometheus exporter for Fabric
|
|
||||||
# https://modrinth.com/mod/fabricexporter
|
|
||||||
- "19565:25585/tcp"
|
|
||||||
volumes:
|
volumes:
|
||||||
- /data/minecraft/createfarming:/data
|
- /data/minecraft/createfarming:/data
|
||||||
tags: [ docker, minecraft, create, createfarming ]
|
tags: [ docker, minecraft ]
|
||||||
|
34
playbooks/tasks/game/minecraft-direwolf20.yml
Normal file
34
playbooks/tasks/game/minecraft-direwolf20.yml
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
# vim:ft=ansible:
|
||||||
|
- name: docker deploy minecraft - direwolf20
|
||||||
|
docker_container:
|
||||||
|
name: minecraft-direwolf20
|
||||||
|
state: started
|
||||||
|
image: itzg/minecraft-server:latest
|
||||||
|
restart_policy: unless-stopped
|
||||||
|
pull: yes
|
||||||
|
env:
|
||||||
|
EULA: "true"
|
||||||
|
GENERIC_PACK: "/modpacks/1.20.1-direwolf20/Da Bois.zip"
|
||||||
|
TYPE: "NEOFORGE"
|
||||||
|
VERSION: "1.20.1"
|
||||||
|
FORGE_VERSION: "47.1.105"
|
||||||
|
MEMORY: "8G"
|
||||||
|
MOTD: "Tannerite Dog Edition\\n#abolishtheatf"
|
||||||
|
OPS: "VintageSalt"
|
||||||
|
RCON_CMDS_STARTUP: |-
|
||||||
|
scoreboard objectives add Deaths deathCount
|
||||||
|
scoreboard objectives add Health health {"text":"❤","color":"red"}
|
||||||
|
RCON_CMDS_ON_CONNECT: |-
|
||||||
|
scoreboard objectives setdisplay list Deaths
|
||||||
|
scoreboard objectives setdisplay belowName Health
|
||||||
|
SNOOPER_ENABLED: "false"
|
||||||
|
SPAWN_PROTECTION: "0"
|
||||||
|
USE_AIKAR_FLAGS: "true"
|
||||||
|
VIEW_DISTANCE: "10"
|
||||||
|
ports:
|
||||||
|
- "25567:25565/tcp"
|
||||||
|
- "25567:25565/udp"
|
||||||
|
volumes:
|
||||||
|
- /data/srv/packs:/modpacks
|
||||||
|
- /data/minecraft/direwolf20:/data
|
||||||
|
tags: [ docker, minecraft, direwolf20 ]
|
@ -1,50 +0,0 @@
|
|||||||
# vim:ft=ansible:
|
|
||||||
- name: docker deploy minecraft - magicpack
|
|
||||||
docker_container:
|
|
||||||
name: minecraft-magicpack
|
|
||||||
state: absent
|
|
||||||
image: itzg/minecraft-server:java8
|
|
||||||
env:
|
|
||||||
# Common envvars
|
|
||||||
EULA: "true"
|
|
||||||
OPS: "VintageSalt"
|
|
||||||
SNOOPER_ENABLED: "false"
|
|
||||||
SPAWN_PROTECTION: "0"
|
|
||||||
USE_AIKAR_FLAGS: "true"
|
|
||||||
#
|
|
||||||
# This enables the use of Ely.by as an auth and skin server
|
|
||||||
# Comment this and the above line out if you'd like to use Mojang's
|
|
||||||
# https://docs.ely.by/en/authlib-injector.html
|
|
||||||
#
|
|
||||||
# All players should register on Ely.by in order for this to work.
|
|
||||||
# They should also use Fjord Launcher by Unmojang:
|
|
||||||
# https://github.com/unmojang/FjordLauncher
|
|
||||||
#
|
|
||||||
JVM_OPTS: "-javaagent:/authlib-injector.jar=ely.by"
|
|
||||||
RCON_CMDS_STARTUP: |-
|
|
||||||
scoreboard objectives add Deaths deathCount
|
|
||||||
#scoreboard objectives add Health health {"text":"❤","color":"red"}
|
|
||||||
RCON_CMDS_ON_CONNECT: |-
|
|
||||||
scoreboard objectives setdisplay list Deaths
|
|
||||||
#scoreboard objectives setdisplay belowName Health
|
|
||||||
# Pack-specific stuff
|
|
||||||
MODRINTH_PROJECT: "https://srv.9iron.club/files/packs/1.7.10-magicpack/server.mrpack"
|
|
||||||
MOTD: "It's ya boy, uh, skrunkly modpack"
|
|
||||||
TYPE: "MODRINTH"
|
|
||||||
VERSION: "1.7.10"
|
|
||||||
MAX_MEMORY: "6G"
|
|
||||||
#VIEW_DISTANCE: "10"
|
|
||||||
ports:
|
|
||||||
- "25565:25565/tcp"
|
|
||||||
- "25565:25565/udp"
|
|
||||||
- "24454:24454/udp"
|
|
||||||
# Prometheus exporter for Forge
|
|
||||||
# https://www.curseforge.com/minecraft/mc-mods/prometheus-exporter
|
|
||||||
- "19565:19565/tcp"
|
|
||||||
# Prometheus exporter for Fabric
|
|
||||||
# https://modrinth.com/mod/fabricexporter
|
|
||||||
#- "19565:25585/tcp"
|
|
||||||
volumes:
|
|
||||||
- /data/minecraft/magicpack:/data
|
|
||||||
- /data/minecraft/authlib-injector-1.2.5.jar:/authlib-injector.jar
|
|
||||||
tags: [ docker, minecraft, magicpack ]
|
|
33
playbooks/tasks/game/minecraft-vanilla.yml
Normal file
33
playbooks/tasks/game/minecraft-vanilla.yml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
# vim:ft=ansible:
|
||||||
|
- name: docker deploy minecraft - vanilla
|
||||||
|
docker_container:
|
||||||
|
name: minecraft-vanilla
|
||||||
|
state: absent
|
||||||
|
image: itzg/minecraft-server:latest
|
||||||
|
restart_policy: unless-stopped
|
||||||
|
pull: yes
|
||||||
|
env:
|
||||||
|
DIFFICULTY: "normal"
|
||||||
|
ENABLE_COMMAND_BLOCK: "true"
|
||||||
|
EULA: "true"
|
||||||
|
MAX_PLAYERS: "8"
|
||||||
|
MODRINTH_PROJECT: "https://modrinth.com/modpack/adrenaserver"
|
||||||
|
MOTD: "Tannerite Dog Edition\\n#abolishtheatf"
|
||||||
|
OPS: "VintageSalt"
|
||||||
|
RCON_CMDS_STARTUP: |-
|
||||||
|
scoreboard objectives add Deaths deathCount
|
||||||
|
scoreboard objectives add Health health {"text":"❤","color":"red"}
|
||||||
|
RCON_CMDS_ON_CONNECT: |-
|
||||||
|
scoreboard objectives setdisplay list Deaths
|
||||||
|
scoreboard objectives setdisplay belowName Health
|
||||||
|
SNOOPER_ENABLED: "false"
|
||||||
|
SPAWN_PROTECTION: "0"
|
||||||
|
TYPE: "MODRINTH"
|
||||||
|
USE_AIKAR_FLAGS: "true"
|
||||||
|
VIEW_DISTANCE: "12"
|
||||||
|
ports:
|
||||||
|
- "26565:25565/tcp"
|
||||||
|
- "26565:25565/udp"
|
||||||
|
volumes:
|
||||||
|
- /data/minecraft/vanilla:/data
|
||||||
|
tags: [ docker, minecraft ]
|
@ -1,44 +0,0 @@
|
|||||||
# vim:ft=ansible:
|
|
||||||
- name: docker deploy minecraft - weediewack next gen pack
|
|
||||||
docker_container:
|
|
||||||
name: minecraft-weedie
|
|
||||||
state: started
|
|
||||||
image: itzg/minecraft-server:latest
|
|
||||||
env:
|
|
||||||
# Common envvars
|
|
||||||
EULA: "true"
|
|
||||||
OPS: "VintageSalt"
|
|
||||||
SNOOPER_ENABLED: "false"
|
|
||||||
SPAWN_PROTECTION: "0"
|
|
||||||
USE_AIKAR_FLAGS: "true"
|
|
||||||
ALLOW_FLIGHT: "true"
|
|
||||||
RCON_CMDS_STARTUP: |-
|
|
||||||
scoreboard objectives add Deaths deathCount
|
|
||||||
scoreboard objectives add Health health {"text":"❤","color":"red"}
|
|
||||||
RCON_CMDS_ON_CONNECT: |-
|
|
||||||
scoreboard objectives setdisplay list Deaths
|
|
||||||
scoreboard objectives setdisplay belowName Health
|
|
||||||
# Pack-specific stuff
|
|
||||||
TYPE: "Forge"
|
|
||||||
MOTD: "We're doing it a-fucking-gain!"
|
|
||||||
VERSION: "1.20.1"
|
|
||||||
FORGE_VERSION: "47.3.11"
|
|
||||||
MAX_MEMORY: "8G"
|
|
||||||
#GENERIC_PACKS: "Server Files 1.3.7"
|
|
||||||
#GENERIC_PACKS_PREFIX: "https://mediafilez.forgecdn.net/files/5832/451/"
|
|
||||||
#GENERIC_PACKS_SUFFIX: ".zip"
|
|
||||||
#SKIP_GENERIC_PACK_UPDATE_CHECK: "true"
|
|
||||||
#VIEW_DISTANCE: "10"
|
|
||||||
ports:
|
|
||||||
- "25565:25565/tcp"
|
|
||||||
- "25565:25565/udp"
|
|
||||||
- "24454:24454/udp"
|
|
||||||
# Prometheus exporter for Forge
|
|
||||||
# https://www.curseforge.com/minecraft/mc-mods/prometheus-exporter
|
|
||||||
- "19566:19565/tcp"
|
|
||||||
# Prometheus exporter for Fabric
|
|
||||||
# https://modrinth.com/mod/fabricexporter
|
|
||||||
#- "19565:25585/tcp"
|
|
||||||
volumes:
|
|
||||||
- /data/minecraft/weedie:/data
|
|
||||||
tags: [ docker, minecraft, weedie ]
|
|
@ -1,47 +0,0 @@
|
|||||||
# vim:ft=ansible:
|
|
||||||
- name: ensure docker network
|
|
||||||
docker_network: name=satisfactory
|
|
||||||
tags: [ satisfactory, docker, network ]
|
|
||||||
- name: docker deploy satisfactory
|
|
||||||
docker_container:
|
|
||||||
name: satisfactory
|
|
||||||
state: absent
|
|
||||||
image: wolveix/satisfactory-server:latest
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
pull: yes
|
|
||||||
networks:
|
|
||||||
- name: satisfactory
|
|
||||||
aliases: [ "gameserver" ]
|
|
||||||
env:
|
|
||||||
MAXPLAYERS: "8"
|
|
||||||
# We have this turned on for modding's sake
|
|
||||||
#SKIPUPDATE: "true"
|
|
||||||
ports:
|
|
||||||
- '7777:7777/udp'
|
|
||||||
- '7777:7777/tcp'
|
|
||||||
volumes:
|
|
||||||
- /data/satisfactory/config:/config
|
|
||||||
tags: [ docker, satisfactory ]
|
|
||||||
- name: docker deploy satisfactory sftp
|
|
||||||
docker_container:
|
|
||||||
name: satisfactory-sftp
|
|
||||||
state: started
|
|
||||||
image: atmoz/sftp
|
|
||||||
restart_policy: unless-stopped
|
|
||||||
pull: yes
|
|
||||||
ulimits:
|
|
||||||
- 'nofile:262144:262144'
|
|
||||||
ports:
|
|
||||||
- '7776:22/tcp'
|
|
||||||
volumes:
|
|
||||||
- /data/satisfactory/config:/home/servermgr/game
|
|
||||||
command: 'servermgr:{{ server_password }}:1000'
|
|
||||||
vars:
|
|
||||||
server_password: !vault |
|
|
||||||
$ANSIBLE_VAULT;1.1;AES256
|
|
||||||
33336138656461646462323661363336623235333861663730373535656331623230313334353239
|
|
||||||
6535623833343237626161383833663435643262376133320a616634613764396661316332373339
|
|
||||||
33633662366666623931643635313162366339306539666632643437396637616632633432326631
|
|
||||||
3038333932623638390a386362653463306338326436396230633562313466336464663764643461
|
|
||||||
3134
|
|
||||||
tags: [ docker, satisfactory, sidecar, sftp ]
|
|
@@ -2,7 +2,7 @@
 - name: docker deploy zomboid
   community.docker.docker_container:
     name: zomboid
-    state: started
+    state: absent
     # Wanted to use latest but:
     # https://github.com/Renegade-Master/zomboid-dedicated-server/issues/74
     # https://github.com/Renegade-Master/zomboid-dedicated-server/issues/68
@@ -16,14 +16,14 @@
       ADMIN_USERNAME: "Salt"
       ADMIN_PASSWORD: "SuperMegaDicks"
       MAX_PLAYERS: "8"
-      MAP_NAMES: "vehicle_interior;MotoriousExpandedSpawnZones,VehicleSpawnZonesExpandedRedRace;Louisville"
+      MAP_NAMES: "vehicle_interior;MotoriousExpandedSpawnZones,VehicleSpawnZonesExpandedRedRace;Basements;Louisville"
       # Generating this list by hand is asinine
       # Go here: https://getcollectionids.moonguy.me/
       # Use this: https://steamcommunity.com/sharedfiles/filedetails/?id=3145884377
       # Or this: 3145884377
       # Add mods to that collection if you want to add them here, then regen these two fields.
-      MOD_NAMES: "P4HasBeenRead;AutoSewing;AutoMechanics;BulbMechanics;ShowBulbCondition;modoptions;BoredomTweaks;MoreCLR_desc4mood;MiniHealthPanel;CombatText;manageContainers;EQUIPMENT_UI;ModManager;MoreDescriptionForTraits4166;SkillRecoveryJournal;RV_Interior_MP;RV_Interior_Vanilla;FRUsedCars;FRUsedCarsNRN;Lingering Voices;MapSymbolSizeSlider;VISIBLE_BACKPACK_BACKGROUND;BetterSortCC;MapLegendUI;BB_CommonSense;DRAW_ON_MAP;coavinsfirearmbase;coavinsfirearmsupport1;coavinsfirearmsupport2;coavinsfirearmsupport3;coavinsfirearmsupport4;coavinsfirearmsupport5;Shrek1and2intheirENTIRETYasvhs's;NoVanillaVehicles;AnotherPlayersOnMinimap;AnimSync;DescriptiveSkillTooltips;darkPatches;noirrsling;Susceptible;ToadTraits;TheStar;BION_PlainMoodles;FH;ProximityInventory;SlowConsumption;MaintenanceImprovesRepair;fhqExpVehSpawn;fhqExpVehSpawnGageFarmDisable;fhqExpVehSpawnM911FarmDisable;fhqExpVehSpawnP19AFarmDisable;fhqExpVehSpawnNoVanilla;fhqExpVehSpawnRedRace;RUNE-EXP;NestedContainer01;AddRandomSprinters;TrueActionsDancing;VFExpansion1;Squishmallows;DeLoreanDMC-12;1989Porsche911Turbo;suprabase;IceCreamTruckFreezer;GarbageTruck;T3;MarTraitsBlind;BraStorage;KuromiBackpack;TalsCannedRat;happygilmoretape;SimpleReadWhileWalking41;FasterHoodOpening;SchizophreniaTrait;TwinkiesVan;LouisVille SP;hf_point_blank;UIAPI;WaterDispenser;TheOnlyCure;FancyHandwork;BrutalHandwork;WanderingZombies;AuthenticZLite;ReloadAllMagazines;jiggasGreenfireMod;amclub;SpnClothHideFix;SpnOpenCloth;SpnHairAPI;PwSleepingbags;Video_Game_Consoles;metal_mod_pariah;truemusic;tm_grunge;TPAM;EasyLaundry;DropRollMod;9301;No Mo Culling;SpnCloth;SpnClothHideFix;SpnHair;lore_friendly_music;AmmoLootDropVFE;tsarslib;ItemTweakerAPIExtraClothingAddon;ItemTweakerAPI;TsarcraftCache2;TrueMusicMoodImprovement;StickyWeight"
-      MOD_WORKSHOP_IDS: "2544353492;2584991527;2588598892;2778537451;2964435557;2169435993;2725360009;2763647806;2866258937;2286124931;2650547917;2950902979;2694448564;2685168362;2503622437;2822286426;1510950729;2874678809;2734705913;2808679062;2313387159;2710167561;2875848298;2804531012;3101379739;3138722707;2535461640;3117340325;2959512313;3134776712;2949818236;2786499395;2795677303;1299328280;2619072426;3008416736;2447729538;2847184718;2864231031;2920089312;2793164190;2758443202;2946221823;2797104510;2648779556;2667899942;3109119611;1687801932;2567438952;2689292423;2783373547;2783580134;2748047915;3121062639;3045079599;3022845661;3056136040;3163764362;2845952197;2584112711;2711720885;2838950860;2849247394;2678653895;2990322197;2760035814;2687798127;2949998111;3115293671;3236152598;2904920097;2934621024;2983905789;2335368829;2907834593;2920899878;1703604612;2778576730;2812326159;3041733782;2714848168;2831786301;2853710135;2613146550;2810869183;2717792692;2925034918;2908614026;2866536557;2684285534;2463184726;2839277937;3041910754;2392709985;2810800927;566115016;2688809268;3048902085;2997503254"
+      MOD_NAMES: "P4HasBeenRead;AutoSewing;AutoMechanics;BulbMechanics;ShowBulbCondition;modoptions;BoredomTweaks;MoreCLR_desc4mood;MiniHealthPanel;CombatText;manageContainers;EQUIPMENT_UI;ModManager;MoreDescriptionForTraits4166;SkillRecoveryJournal;RV_Interior_MP;RV_Interior_Vanilla;FRUsedCars;FRUsedCarsNRN;Lingering Voices;MapSymbolSizeSlider;VISIBLE_BACKPACK_BACKGROUND;BetterSortCC;MapLegendUI;BB_CommonSense;DRAW_ON_MAP;coavinsfirearmbase;coavinsfirearmsupport1;coavinsfirearmsupport2;coavinsfirearmsupport3;coavinsfirearmsupport4;coavinsfirearmsupport5;Shrek1and2intheirENTIRETYasvhs;NoVanillaVehicles;AnotherPlayersOnMinimap;AnimSync;DescriptiveSkillTooltips;noirrsling;Susceptible;ToadTraits;TheStar;BION_PlainMoodles;FH;ProximityInventory;SlowConsumption;MaintenanceImprovesRepair;fhqExpVehSpawn;fhqExpVehSpawnGageFarmDisable;fhqExpVehSpawnM911FarmDisable;fhqExpVehSpawnP19AFarmDisable;fhqExpVehSpawnNoVanilla;fhqExpVehSpawnRedRace;RUNE-EXP;NestedContainer01;AddRandomSprinters;TrueActionsDancing;VFExpansion1;Squishmallows;1989Porsche911Turbo;suprabase;IceCreamTruckFreezer;GarbageTruck;T3;MarTraitsBlind;BraStorage;KuromiBackpack;TalsCannedRat;happygilmoretape;SimpleReadWhileWalking41;FasterHoodOpening;SchizophreniaTrait;TwinkiesVan;Basements;LouisVille SP;hf_point_blank;UIAPI;WaterDispenser;BasementsPatch;No Mo Culling;FancyHandwork;BrutalHandwork;TheOnlyCure;WanderingZombies"
+      MOD_WORKSHOP_IDS: "2544353492;2584991527;2588598892;2778537451;2964435557;2169435993;2725360009;2763647806;2866258937;2286124931;2650547917;2950902979;2694448564;2685168362;2503622437;2822286426;1510950729;2874678809;2734705913;2808679062;2313387159;2710167561;2875848298;2804531012;3101379739;3138722707;2535461640;3117340325;2959512313;3134776712;2949818236;2786499395;2795677303;1299328280;2619072426;3008416736;2447729538;2847184718;2864231031;2920089312;2793164190;2758443202;2946221823;2797104510;2648779556;2667899942;3109119611;1687801932;2567438952;2689292423;2783373547;2783580134;2748047915;3121062639;3045079599;3022845661;3056136040;3163764362;2845952197;2584112711;2711720885;2838950860;2849247394;2678653895;2990322197;2760035814;2687798127;2949998111;3115293671;3236152598;2904920097;2934621024;2983905789"
       RCON_PASSWORD: "SuperMegaDicks"
       SERVER_NAME: "The Salty Spitoon"
       SERVER_PASSWORD: "dicks"
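The in-file comments above describe the intended workflow: maintain the linked Steam collection, then regenerate MOD_NAMES and MOD_WORKSHOP_IDS from it with the collection-ID tool. Purely as an illustration of the format (not part of this change), adding one hypothetical mod means appending its mod ID to one string and its Workshop item ID to the other:

    env:
      # Illustrative only: "ExampleMod" and Workshop item 1234567890 are made up.
      # Both variables are single semicolon-separated strings.
      MOD_NAMES: "P4HasBeenRead;ExampleMod"
      MOD_WORKSHOP_IDS: "2544353492;1234567890"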
playbooks/tasks/web/firefly-iii.yml (new file, 53 lines)
@@ -0,0 +1,53 @@
+# vim:ft=ansible:
+#
+# NOTE: This app is currently not fully-functional. It needs a cronjob
+# implemented for things like recurring transactions and budgets.
+#
+# https://docs.firefly-iii.org/firefly-iii/advanced-installation/cron/
+#
+- name: docker deploy firefly
+  docker_container:
+    name: firefly
+    image: fireflyiii/core:latest
+    env:
+      APP_KEY: "{{ secret_firefly_app_key }}"
+      APP_URL: "https://firefly.desu.ltd"
+      # We set TRUSTED_PROXIES to * here, which allows any app in the docker
+      # network to proxy this. I'm considering this a fine security concession
+      # since we can't guarantee the IP of the ingress container
+      TRUSTED_PROXIES: "*"
+      DB_HOST: "10.0.0.2"
+      DB_PORT: "5432"
+      DB_CONNECTION: pgsql
+      DB_DATABASE: "firefly-desultd"
+      DB_USERNAME: "firefly-desultd"
+      DB_PASSWORD: "{{ secret_firefly_db_pass }}"
+    networks:
+      - name: web
+        aliases: [ "firefly" ]
+    volumes:
+      - /data/firefly/export:/var/www/html/storage/export
+      - /data/firefly/logs:/var/www/html/storage/logs
+      - /data/firefly/upload:/var/www/html/storage/upload
+  tags: [ docker, firefly ]
+- name: docker deploy firefly importer
+  docker_container:
+    name: firefly-importer
+    image: fireflyiii/data-importer:latest
+    # We need to use this workaround with custom DNS servers due to some host
+    # entries on the container host
+    dns_servers:
+      - "8.8.8.8"
+      - "8.8.4.4"
+    env:
+      # This TRUSTED_PROXIES line is still undocumented
+      # https://github.com/firefly-iii/firefly-iii/issues/3256
+      # God fucking dammit
+      TRUSTED_PROXIES: "*"
+      FIREFLY_III_ACCESS_TOKEN: "{{ secret_firefly_access_token }}"
+      FIREFLY_III_URL: "http://firefly:8080"
+      VANITY_URL: "https://firefly.desu.ltd"
+    networks:
+      - name: web
+        aliases: [ "firefly-importer" ]
+  tags: [ docker, firefly ]
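As the NOTE at the top of this new file says, Firefly III still needs a periodic cron hit for recurring transactions and budgets (see the linked upstream docs). A minimal sketch of one way to wire that up as an Ansible task is below; the schedule, the curl-based approach, and the secret_firefly_cron_token variable are assumptions for illustration, not something this commit adds.

- name: cron firefly iii periodic jobs
  ansible.builtin.cron:
    name: firefly-iii-cron
    user: root
    minute: "0"
    hour: "3"
    # Firefly III exposes a cron endpoint keyed by a CLI token; the token
    # variable used here is hypothetical and would need to be vaulted like
    # the other firefly secrets above.
    job: "curl -fsS https://firefly.desu.ltd/api/v1/cron/{{ secret_firefly_cron_token }} > /dev/null"
  tags: [ docker, firefly ]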
@@ -1,42 +0,0 @@
-# vim:ft=ansible:
-- name: ensure grafana dirs
-  ansible.builtin.file:
-    state: directory
-    owner: 472
-    group: 472
-    mode: "0750"
-    path: "{{ item }}"
-  with_items:
-    - /data/grafana/storage
-    - /data/grafana/logs
-  tags: [ docker, grafana, monitoring ]
-- name: docker deploy grafana
-  docker_container:
-    name: grafana
-    image: grafana/grafana-oss:main
-    env:
-      TZ: "America/Chicago"
-      # This enables logging to STDOUT for log aggregators to more easily hook it
-      GF_LOG_MODE: "console file"
-      GF_SERVER_DOMAIN: "grafana.desu.ltd"
-      GF_SERVER_PROTOCOL: "http"
-      GF_SERVER_ROOT_URL: "https://grafana.desu.ltd"
-    networks:
-      - name: web
-        aliases: [ "grafana" ]
-    volumes:
-      - /data/grafana/storage:/var/lib/grafana
-      - /data/grafana/logs:/var/log/grafana
-  tags: [ docker, grafana, monitoring ]
-- name: docker deploy grafana matrix bridge
-  docker_container:
-    name: grafana-matrix-bridge
-    image: registry.gitlab.com/hctrdev/grafana-matrix-forwarder:latest
-    env:
-      GMF_MATRIX_USER: "@grafana:desu.ltd"
-      GMF_MATRIX_PASSWORD: "{{ secret_grafana_matrix_token }}"
-      GMF_MATRIX_HOMESERVER: matrix.desu.ltd
-    networks:
-      - name: web
-        aliases: [ "grafana-matrix-bridge" ]
-  tags: [ docker, grafana, monitoring, bridge, matrix ]
@@ -2,7 +2,6 @@
 - name: docker deploy lidarr
   docker_container:
     name: lidarr
-    state: absent
     image: linuxserver/lidarr:latest
     networks:
       - name: web
@@ -10,10 +9,7 @@
     env:
       TZ: "America/Chicago"
     volumes:
-      # https://github.com/RandomNinjaAtk/arr-scripts?tab=readme-ov-file
      - /data/lidarr/config:/config
-      - /data/lidarr/custom-services.d:/custom-services.d
-      - /data/lidarr/custom-cont-init.d:/custom-cont-init.d
      - /data/shared/downloads:/data
      - /data/shared/media/music:/music
   tags: [ docker, lidarr ]
@@ -1,18 +0,0 @@
-# vim:ft=ansible:
-- name: docker deploy navidrome
-  docker_container:
-    name: navidrome
-    state: absent
-    image: deluan/navidrome:latest
-    user: 911:911
-    env:
-      ND_BASEURL: "https://music.desu.ltd"
-      ND_PROMETHEUS_ENABLED: "true"
-      ND_LOGLEVEL: "info"
-    networks:
-      - name: web
-        aliases: [ "navidrome" ]
-    volumes:
-      - /data/navidrome/data:/data
-      - /data/shared/media/music:/music:ro
-  tags: [ docker, navidrome ]
@@ -2,7 +2,7 @@
 - name: docker deploy synapse
   docker_container:
     name: synapse
-    image: matrixdotorg/synapse:latest
+    image: ghcr.io/element-hq/synapse:latest
     env:
       TZ: "America/Chicago"
       SYNAPSE_SERVER_NAME: matrix.desu.ltd
@@ -14,8 +14,6 @@ backup_s3_bucket: replaceme
 # Credentials for the bucket
 backup_s3_aws_access_key_id: REPLACEME
 backup_s3_aws_secret_access_key: REPLACEME
-# Changeme if you use a non-AWS S3-compatible object store (like Backblaze)
-#backup_s3_aws_endpoint_url:
 
 # List of files/directories to back up
 # Note that tar is NOT instructed to recurse through symlinks
@@ -29,4 +27,3 @@ backup_s3backup_exclude_list_extra: []
 # Note that passing f here is probably a bad idea
 backup_s3backup_tar_args: cz
 backup_s3backup_tar_args_extra: ""
-# The backup URL to use for S3 copies
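With the endpoint-URL knob gone, the tunables left in this defaults file are the tar flags and the extra include/exclude lists, which look intended for per-host overrides rather than edits here. A hedged example of such an override (the path and extra flag are illustrative only, not values from this repo):

# host_vars/<somehost>.yml -- illustrative values
backup_s3backup_exclude_list_extra:
  - /data/someapp/cache
# Appended after the base "cz" tar flags, e.g. verbose output while debugging
backup_s3backup_tar_args_extra: "v"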
@@ -3,15 +3,11 @@
 Description=Nightly backup service
 After=network-online.target
 Wants=network-online.target
-StartLimitInterval=600
-StartLimitBurst=5
 
 [Service]
 Type=oneshot
 MemoryMax=256M
 ExecStart=/opt/backup.sh
-Restart=on-failure
-RestartSec=5
 
 [Install]
 WantedBy=multi-user.target
@@ -39,7 +39,8 @@ DIRS+=("{{ item }}")
 if command -v ostree > /dev/null 2>&1; then
   for file in $(
     ostree admin config-diff 2>/dev/null | \
-      grep -oP '^[A|M]\s*\K.*'
+      grep -e '^[A|M]' | \
+      awk '{print $2}'
   ); do
     DIRS+=("/etc/$file")
   done
@@ -59,10 +60,7 @@ backup() {
 {% endfor %}
     "$dir" \
     | aws s3 cp --expected-size 274877906944 - \
-{% if backup_s3_aws_endpoint_url is defined %}
-      --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
-{% endif %}
-      "s3://{{ backup_s3_bucket }}/$HOSTNAME/$dir/$(date "+{{ backup_dateformat }}").tar.gz"
+      "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/$dir/$(date "+{{ backup_dateformat }}").tar.gz"
 }
 
 # Tar up all items in the backup list, recursively, and pipe them straight
@@ -107,9 +105,6 @@ if command -v psql > /dev/null 2>&1; then
     sudo -u postgres pg_dump "$db" \
     | gzip -v9 \
     | aws s3 cp - \
-{% if backup_s3_aws_endpoint_url is defined %}
-      --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
-{% endif %}
-      "s3://{{ backup_s3_bucket }}/$HOSTNAME/pgdump/$db/$(date "+{{ backup_dateformat }}").pgsql.gz"
+      "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/pgdump/$db/$(date "+{{ backup_dateformat }}").pgsql.gz"
   done
 fi
@@ -20,7 +20,7 @@
 # Please make any configuration changes in the main repo
 
 set -e
-url="s3://{{ backup_s3_bucket}}/$HOSTNAME/"
+url="s3://{{ backup_s3_bucket}}/{{ inventory_hostname }}/"
 
 # AWS S3 configuration
 # NOTE: THIS IS SECRET INFORMATION
@@ -33,11 +33,7 @@ printf "Querying S3 for restoreable backups (\e[35m$url\e[0m)...\n"
 while read line; do
   filename="$(echo "$line" | awk '{print $NF}')"
   BACKUPS+=("$filename")
-done < <(aws s3 \
-{% if backup_s3_aws_endpoint_url is defined %}
-  --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
-{% endif %}
-  ls "$url")
+done < <(aws s3 ls "$url")
 
 # Present the user with some options
 printf "Possible restorable backups:\n"
@@ -65,8 +61,4 @@ fi
 
 # Copy the thing
 printf "Pulling backup...\n"
-aws s3 \
-{% if backup_s3_aws_endpoint_url is defined %}
-  --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
-{% endif %}
-  cp "$url${BACKUPS[$restoreindex]}" ./
+aws s3 cp "$url${BACKUPS[$restoreindex]}" ./
@@ -44,8 +44,10 @@
 - name: configure rpm-ostree packages
   community.general.rpm_ostree_pkg:
     name:
+      - awscli
      - htop
      - ibm-plex-fonts-all
      - ncdu
+      - screen
      - vim
   when: ansible_os_family == "RedHat" and ansible_pkg_mgr == "atomic_container"
@@ -153,31 +153,17 @@ desktop_flatpak_remotes:
     url: "https://dl.flathub.org/repo/flathub.flatpakrepo"
   - name: flathub-beta
     url: "https://flathub.org/beta-repo/flathub-beta.flatpakrepo"
-  # - name: unmojang
-  #   url: "https://unmojang.github.io/unmojang-flatpak/index.flatpakrepo"
 desktop_flatpak_remotes_extra: []
 
 desktop_flatpak_packages:
   - remote: flathub
     packages:
-      - com.github.KRTirtho.Spotube
-      - com.github.Matoking.protontricks
-      - com.github.tchx84.Flatseal
-      - com.nextcloud.desktopclient.nextcloud
-      - com.valvesoftware.Steam
-      - dev.vencord.Vesktop
-      - im.riot.Riot
-      - io.freetubeapp.FreeTube
-      - io.kopia.KopiaUI
-      - io.mpv.Mpv
+      - com.discordapp.Discord
+      - com.obsproject.Studio
      - net.minetest.Minetest
      - org.DolphinEmu.dolphin-emu
-      - org.gnucash.GnuCash
      - org.mozilla.firefox
-      - org.mozilla.Thunderbird
-      - org.openscad.OpenSCAD
-      - org.qbittorrent.qBittorrent
-  # - remote: unmojang
-  #   packages:
-  #     - org.unmojang.FjordLauncher
+  - remote: flathub-beta
+    packages:
+      - net.lutris.Lutris
 desktop_flatpak_packages_extra: []
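Both the remotes and the package list keep an _extra companion variable, so per-host additions don't have to touch these defaults. A hedged sketch of how a single host could pull one more Flatpak from flathub without editing this file (the application ID is only an example, not something this change adds):

# host_vars/<some-desktop>.yml -- illustrative
desktop_flatpak_packages_extra:
  - remote: flathub
    packages:
      - org.videolan.VLC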
@@ -29,7 +29,7 @@
   when: ansible_pkg_mgr == "apt"
 - name: configure pip3 packages
   ansible.builtin.pip: executable=/usr/bin/pip3 state=latest name="{{ desktop_pip3_packages + desktop_pip3_packages_extra }}"
-  when: ansible_pkg_mgr == "apt"
+  when: ansible_os_family != "Gentoo"
 - name: configure flatpak
   block:
     - name: configure flatpak remotes
@@ -1,7 +1,3 @@
-{% if ingress_head is defined %}
-{{ ingress_head }}
-{% endif %}
-
 {% for server in ingress_servers %}
 server {
 {% if loop.index == 1 %}
@@ -9,7 +9,7 @@
     image: manios/nagios:latest
     pull: yes
     restart_policy: unless-stopped
-    state: absent
+    state: started
     env:
       NAGIOSADMIN_USER: admin
       NAGIOSADMIN_PASS: "{{ nagios_admin_pass }}"
@@ -149,5 +149,70 @@ define host {
 
     contact_groups ansible
 }
+{% for service in vars.services %}
+{% for tag in service.tags %}
+{# #}
+{% if tag.slug == "nagios-checkmatrix" %}
+{% for port in service.ports %}
+define service {
+    use ansible-generic-service
+    service_description Matrix Synapse - {{ service.name }} - {{ port }}
+    check_command check_http!--ssl -H {{ service.name }} -u https://{{ service.name }}/health -s OK -p {{ port }} -f sticky
+    host_name {{ host }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checkminecraft" %}
+{% for port in service.ports %}
+define service {
+    use ansible-generic-service
+    service_description Minecraft - {{ service.name }} - {{ port }}
+    check_command check_by_ssh!/usr/local/bin/monitoring-scripts/check_minecraft -H {{ host }} -p {{ port }} -m "{{ service.description }}" -f -w 3 -c 5
+    host_name {{ host }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checkhttp" %}
+{% for port in service.ports %}
+define service {
+    use ansible-generic-service
+    service_description HTTP - {{ service.name }} - {{ port }}
+    check_command check_http!-H {{ service.name }} -p {{ port }} -f sticky
+    host_name {{ host }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checkhttps" %}
+{% for port in service.ports %}
+define service {
+    use ansible-generic-service
+    service_description HTTPS - {{ service.name }} - {{ port }}
+    check_command check_http!--ssl -H {{ service.name }} -p {{ port }} -f sticky
+    host_name {{ host }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checktcp" %}
+{% for port in service.ports %}
+define service {
+    use ansible-generic-service
+    service_description TCP {{ service.name }} - {{ port }}
+    check_command check_tcp!{{ port }}
+    host_name {{ host }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% endfor %}
+{% endfor %}
 {% endif %}
 {% endfor %}
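These added loops assume each Nagios-tagged host publishes a services variable shaped like Netbox service objects: a name that resolves for the HTTP checks, a port list, a free-text description, and tags whose slug selects the check type. A hedged sketch of that expected shape (all values are placeholders, not data from this repo):

services:
  - name: web1.example.com
    description: "Example web service"
    ports: [443]
    tags:
      - slug: nagios-checkhttps
  - name: mc1.example.com
    description: "Example Minecraft server"
    ports: [25565]
    tags:
      - slug: nagios-checkminecraft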
@@ -182,6 +182,55 @@ define host {
 # Created: {{ service.value.created }}
 # Updated: {{ service.value.last_updated }}
 {% for tag in service.value.tags %}
+{# #}
+{% if tag.slug == "nagios-checkminecraft" %}
+{% for port in service.value.ports %}
+define service {
+    use ansible-generic-service
+    service_description Minecraft - {{ service.value.name }} - {{ port }}
+    check_command check_by_ssh!/usr/local/bin/monitoring-scripts/check_minecraft -H {{ host_name }} -p {{ port }} -m "{{ service.value.description }}" -f -w 3 -c 5
+    host_name {{ host_name }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checkhttp" %}
+{% for port in service.value.ports %}
+define service {
+    use ansible-generic-service
+    service_description HTTP - {{ service.value.name }} - {{ port }}
+    check_command check_http!-H {{ service.value.name }} -p {{ port }} -f sticky
+    host_name {{ host_name }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checkhttps" %}
+{% for port in service.value.ports %}
+define service {
+    use ansible-generic-service
+    service_description HTTPS - {{ service.value.name }} - {{ port }}
+    check_command check_http!--ssl -H {{ service.value.name }} -p {{ port }} -f sticky
+    host_name {{ host_name }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
+{% if tag.slug == "nagios-checktcp" %}
+{% for port in service.value.ports %}
+define service {
+    use ansible-generic-service
+    service_description TCP {{ service.value.name }} - {{ port }}
+    check_command check_tcp!{{ port }}
+    host_name {{ host_name }}
+    contact_groups ansible
+}
+{% endfor %}
+{% endif %}
+{# #}
 {% endfor %}
 {% endif %}
 {% endfor %}
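This second block is nearly identical but reads service.value.* and a separate host_name variable, so it appears to iterate services as key/value items (for example the output of a dict2items-style mapping) rather than a flat list. A hedged sketch of one item as this template would see it (all values are placeholders):

service:
  key: "123"
  value:
    name: web1.example.com
    description: "Example web service"
    created: "2024-01-01T00:00:00Z"
    last_updated: "2024-01-02T00:00:00Z"
    ports: [443]
    tags:
      - slug: nagios-checkhttps
host_name: vm-general-1.example.com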
@@ -1,8 +0,0 @@
-#!/usr/bin/env ansible-playbook
-# vim:ft=ansible:
-- name: restart prometheus container
-  docker_container: name="prometheus" state=started restart=yes
-  become: yes
-- name: restart blackbox container
-  docker_container: name="prometheus-blackbox" state=started restart=yes
-  become: yes
@@ -1,67 +0,0 @@
-# vim:ft=ansible:
-- name: deploy prometheus
-  block:
-    - name: ensure prometheus dirs
-      ansible.builtin.file:
-        state: directory
-        owner: 5476
-        group: 5476
-        mode: "0750"
-        path: "{{ item }}"
-      with_items:
-        - /data/prometheus/config
-        - /data/prometheus/data
-      notify: restart prometheus container
-    - name: template out configuration file
-      ansible.builtin.template:
-        src: prometheus.yml.j2
-        owner: 5476
-        group: 5476
-        mode: "0640"
-        dest: /data/prometheus/config/prometheus.yml
-      notify: restart prometheus container
-    - name: docker deploy prometheus
-      community.docker.docker_container:
-        name: prometheus
-        image: prom/prometheus:latest
-        restart_policy: unless-stopped
-        user: 5476:5476
-        env:
-          TZ: "America/Chicago"
-        networks:
-          - name: web
-            aliases: [ "prometheus" ]
-        volumes:
-          - /data/prometheus/config:/etc/prometheus
-          - /data/prometheus/data:/prometheus
-- name: deploy prometheus blackbox
-  block:
-    - name: ensure blackbox dirs
-      ansible.builtin.file:
-        state: directory
-        owner: 5476
-        group: 5476
-        mode: "0750"
-        path: /data/prometheus/blackbox
-      notify: restart blackbox container
-    - name: template out configuration file
-      ansible.builtin.template:
-        src: blackbox.yml.j2
-        owner: 5476
-        group: 5476
-        mode: "0640"
-        dest: /data/prometheus/blackbox/blackbox.yml
-      notify: restart blackbox container
-    - name: docker deploy prometheus blackbox
-      community.docker.docker_container:
-        name: prometheus-blackbox
-        image: quay.io/prometheus/blackbox-exporter:latest
-        restart_policy: unless-stopped
-        user: 5476:5476
-        command:
-          - '--config.file=/config/blackbox.yml'
-        networks:
-          - name: web
-            aliases: [ "blackbox" ]
-        volumes:
-          - /data/prometheus/blackbox:/config
@@ -1,19 +0,0 @@
-# https://github.com/prometheus/blackbox_exporter/blob/master/CONFIGURATION.md
-# vim:ft=ansible:
-modules:
-  http_2xx:
-    prober: http
-    timeout: 5s
-    http:
-      preferred_ip_protocol: "ip4"
-      follow_redirects: true
-      valid_http_versions: ["HTTP/1.1", "HTTP/2.0"]
-      valid_status_codes: [] # Defaults to 2xx
-      method: GET
-  ssh_banner:
-    prober: tcp
-    timeout: 3s
-    tcp:
-      query_response:
-        - expect: "^SSH-2.0-"
-        - send: "SSH-2.0-blackbox-ssh-check"
@@ -1,146 +0,0 @@
-# vim:ft=ansible:
----
-global:
-  scrape_interval: 15s
-  evaluation_interval: 15s
-
-scrape_configs:
-  # Default Prometheus job to monitor itself
-  - job_name: "prometheus"
-    static_configs:
-      - targets: ['localhost:9090']
-
-  # This is shipped by the Ansible role that deploys Prometheus
-  - job_name: "blackbox-ssh"
-    metrics_path: /probe
-    params:
-      module: [ssh_banner]
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-        - "{{ host }}:22"
-{% endfor %}
-    relabel_configs:
-      - source_labels: [__address__]
-        target_label: __param_target
-      - source_labels: [__param_target]
-        target_label: instance
-      - target_label: __address__
-        replacement: blackbox:9115
-  - job_name: "blackbox-http"
-    metrics_path: /probe
-    params:
-      module: [http_2xx]
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-{% set vars = hostvars[host] %}
-{% for service in vars.services %}
-{% for tag in service.tags %}
-{# #}
-{% if tag.slug == "nagios-checkhttps" %}
-{% for port in service.ports %}
-        - "https://{{ service.name }}:{{ port }}"
-{% endfor %}
-{% endif %}
-{# #}
-{% if tag.slug == "nagios-checkmatrix" %}
-{% for port in service.ports %}
-        - "https://{{ service.name }}:{{ port }}/health"
-{% endfor %}
-{% endif %}
-{# #}
-{% endfor %}
-{% endfor %}
-{% endfor %}
-    relabel_configs:
-      - source_labels: [__address__]
-        target_label: __param_target
-      - source_labels: [__param_target]
-        target_label: instance
-      - target_label: __address__
-        replacement: blackbox:9115
-  - job_name: "blackbox-exporter"
-    static_configs:
-      - targets: ['blackbox:9115']
-
-  # This job manages Minecraft servers, when we have them
-  - job_name: "minecraft"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-{% set vars = hostvars[host] %}
-{% for service in vars.services %}
-{% for tag in service.tags %}
-{# #}
-{% if tag.slug == "nagios-checkminecraft" %}
-{% for port in service.ports %}
-        - "{{ host }}:{{ port }}"
-{% endfor %}
-{% endif %}
-{# #}
-{% endfor %}
-{% endfor %}
-{% endfor %}
-
-  # This job takes in information from Netbox on the generic "prom-metrics" tag
-  # It's useful for all sorts of stuff
-  - job_name: "generic"
-    scheme: "https"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-{% set vars = hostvars[host] %}
-{% for service in vars.services %}
-{% for tag in service.tags %}
-{# #}
-{% if tag.slug == "prom-metrics" %}
-{% for port in service.ports %}
-        - "{{ service.name }}:{{ port }}"
-{% endfor %}
-{% endif %}
-{# #}
-{% endfor %}
-{% endfor %}
-{% endfor %}
-
-  # This one does the same thing but for HTTP-only clients
-  - job_name: "generic-http"
-    scheme: "http"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-{% set vars = hostvars[host] %}
-{% for service in vars.services %}
-{% for tag in service.tags %}
-{# #}
-{% if tag.slug == "prom-metrics-http" %}
-{% for port in service.ports %}
-        - "{{ service.name }}:{{ port }}"
-{% endfor %}
-{% endif %}
-{# #}
-{% endfor %}
-{% endfor %}
-{% endfor %}
-  # These two jobs are included for every node in our inventory
-  - job_name: "node-exporter"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-        - '{{ host }}:9100'
-{% endfor %}
-  - job_name: "cadvisor-exporter"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios'] %}
-        - '{{ host }}:9101'
-{% endfor %}
-
-  # This container is used on every psql machine
-  - job_name: "psql-exporter"
-    static_configs:
-      - targets:
-{% for host in groups['tags_nagios-checkpgsql'] %}
-        - '{{ host }}:9102'
-{% endfor %}