Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 832 additions and 123 deletions
@@ -6,15 +6,15 @@ motd_lines:
   - "IPs: {{ansible_bond0.ipv4.address}}"
 debian_sources:
-  - "deb http://deb.debian.org/debian/ bullseye main non-free contrib"
-  - "deb http://security.debian.org/debian-security bullseye-security main contrib non-free"
-  - "deb http://deb.debian.org/debian/ bullseye-updates main contrib non-free"
-  - "deb http://download.proxmox.com/debian/pve bullseye pve-no-subscription"
+  - "deb http://deb.debian.org/debian/ bookworm main non-free contrib"
+  - "deb http://security.debian.org/debian-security bookworm-security main contrib non-free"
+  - "deb http://deb.debian.org/debian/ bookworm-updates main contrib non-free"
+  - "deb http://download.proxmox.com/debian/pve bookworm pve-no-subscription"
 debian_keys_id:
 debian_keys_url:
-  - "https://enterprise.proxmox.com/debian/proxmox-release-bullseye.gpg"
+  - "https://enterprise.proxmox.com/debian/proxmox-release-bookworm.gpg"
 # Art des Hosts: physical, vm, docker
@@ -32,8 +32,8 @@ administratorenteam:
 # Monitoring aktivieren
 alert:
   load:
-    warn: 4
-    crit: 8
+    warn: 20
+    crit: 40
   disks:
     - { mountpoint: "/", warn: "10 GB", crit: "3 GB" }
     # btrfs currently no data from node exporter
...
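The bullseye→bookworm switch above only touches variable data; how the list is consumed is outside this diff. A minimal sketch of one way such a debian_sources list can be applied, assuming an apt-based role (task names and the role layout are illustrative, not taken from this repository):

# Hypothetical consumer of the debian_sources variable shown above.
- name: Configure APT sources for bookworm
  ansible.builtin.apt_repository:
    repo: "{{ item }}"
    state: present
  loop: "{{ debian_sources }}"

- name: Refresh the package index after the source change
  ansible.builtin.apt:
    update_cache: true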
@@ -6,10 +6,10 @@ motd_lines:
   - "Öffentliche IPs: {{ansible_ens18.ipv4.address}} / {{ansible_ens18.ipv6[0].address}}"
 debian_sources:
-  - "deb http://ftp2.de.debian.org/debian/ bullseye main contrib non-free"
-  - "deb http://ftp.debian.org/debian bullseye-updates main contrib non-free"
-  - "deb http://security.debian.org/ bullseye-security main contrib non-free"
-  - "deb https://download.docker.com/linux/debian bullseye stable"
+  - "deb http://ftp2.de.debian.org/debian/ bookworm main contrib non-free"
+  - "deb http://ftp.debian.org/debian bookworm-updates main contrib non-free"
+  - "deb http://security.debian.org/ bookworm-security main contrib non-free"
+  - "deb https://download.docker.com/linux/debian bookworm stable"
 debian_keys_id:
@@ -68,8 +68,8 @@ docker:
 # Monitoring aktivieren
 alert:
   load:
-    warn: 8
-    crit: 16
+    warn: 5
+    crit: 10
   containers:
     - { name: "coturn_coturn_1" }
     - { name: "dockerstats_app_1" }
@@ -103,6 +103,7 @@ alert:
     - { name: "mail_mailman-nginx_1" }
     - { name: "matrix_ma1sd_1" }
     - { name: "matrix_db_1" }
+    - { name: "matrix_purgemediacache_1" }
     - { name: "matrix_synapse_1" }
     - { name: "matterbridge_cw_1" }
     - { name: "matterbridge_wz_1" }
@@ -120,7 +121,7 @@ alert:
     - { name: "workadventure_redis_1" }
   disks:
     - { mountpoint: "/", warn: "5 GB", crit: "1 GB" }
-    - { mountpoint: "/srv", warn: "1 GB", crit: "500 MB" }
+    - { mountpoint: "/srv", warn: "5 GB", crit: "1 GB" }
 # Definition von Borgbackup Repositories
...
# Nameskonvention für Server: Pratchett Name/Charaktere
# Namensliste: https://wiki.lspace.org/List_of_Pratchett_characters
# Nächste freie Namen: vimes, cake, colon, detritus, dibbler, dorfl, gaspode, quirm, cherry, nobby, ramkin, ron, shoe, slant, angua, vetinary, bursar, coin, dean, worblehat, luggage, mustrum, rincewind, wrangler, stibbons, whitlow
[test]
[prod]
# Interner Proxmox-Server
# Für Verbindungen über den Webserver als Jumphost folgende Parameter ergänzen:
# ansible_ssh_common_args='-o ForwardAgent=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ProxyCommand="ssh -W %h:%p -q 159.69.57.51"'
weatherwax ansible_ssh_host=192.168.0.200
# Server für interne Dienste
# Container auf dem internen Proxmox Server
# Wichtige Optionen: Nesting = Yes, keyctl = enabled
ogg ansible_ssh_host=192.168.0.201
# Server für VPN Verbindung zum Webserver
# Container auf dem internen Proxmox Server
# Wichtige Optionen: Nesting = Yes, keyctl = enabled
carrot ansible_ssh_host=192.168.0.202
# Externe Server Warpzone
# Öffentlicher Root Server Warpzone bei Hetzner
tiffany ansible_ssh_host=159.69.57.15
# Öffentlicher Webserver Warpzone
# VM auf Tiffany
webserver ansible_ssh_host=159.69.57.51
# Vorstands-VM
# VM auf Tiffany
# Auch erreichbar unter verwaltung.warpzone.ms
verwaltung ansible_ssh_host=195.201.179.60
# Physischer Server für Veranstaltungen / Camps
# warpzone.remote Proxmox-Server
hex ansible_ssh_host=10.111.10.100
# Virtueller Server für Infrastruktur-Dienste auf Veranstaltungen / Camps
# Container auf dem warpzone.remote Proxmox-Server
# Wichtige Optionen: Nesting = Yes, keyctl = enabled
hix ansible_ssh_host=10.111.10.101
@@ -12,11 +12,13 @@ prod:
 # ansible_ssh_common_args='-o ForwardAgent=yes -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o ProxyCommand="ssh -W %h:%p -q 159.69.57.51"'
 weatherwax:
   ansible_ssh_host: 192.168.0.200
+  ansible_user: root
 # Externe Server Warpzone
 # Öffentlicher Root Server Warpzone bei Hetzner
 tiffany:
   ansible_ssh_host: 159.69.57.15
+  ansible_user: root
 vms:
   children:
@@ -27,12 +29,14 @@ prod:
 # Wichtige Optionen: Nesting = Yes, keyctl = enabled
 ogg:
   ansible_ssh_host: 192.168.0.201
+  ansible_user: root
 # Server für VPN Verbindung zum Webserver
 # Container auf dem internen Proxmox Server
 # Wichtige Optionen: Nesting = Yes, keyctl = enabled
 carrot:
   ansible_ssh_host: 192.168.0.202
+  ansible_user: root
 tiffany-vms:
   hosts:
@@ -40,12 +44,14 @@ prod:
 # VM auf Tiffany
 webserver:
   ansible_ssh_host: 159.69.57.51
+  ansible_user: root
 # Vorstands-VM
 # VM auf Tiffany
 # Auch erreichbar unter verwaltung.warpzone.ms
 verwaltung:
   ansible_ssh_host: 195.201.179.60
+  ansible_user: root
 event:
   children:
@@ -55,6 +61,7 @@ event:
 # warpzone.remote Proxmox-Server
 hex:
   ansible_ssh_host: 10.111.10.100
+  ansible_user: root
 vms:
   hosts:
@@ -63,3 +70,4 @@ event:
 # Wichtige Optionen: Nesting = Yes, keyctl = enabled
 hix:
   ansible_ssh_host: 10.111.10.101
+  ansible_user: root
\ No newline at end of file
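Every host entry now pins ansible_user: root, so a quick sanity check after the change is an ad-hoc ping play against the inventory; a minimal sketch, assuming the YAML inventory above is passed with -i (the playbook file name and the prod group selection are illustrative):

# check_login.yml (hypothetical) - confirms name resolution and root SSH login for the prod group
- hosts: prod
  gather_facts: false
  tasks:
    - name: Ping every host as the configured ansible_user
      ansible.builtin.ping:

Run it with something like ansible-playbook -i hosts.yml check_login.yml (inventory file name assumed).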
@@ -19,6 +19,10 @@
       payload: "CLOSED"
       then:
         - switch.turn_off: relay
+    - topic: warpzone/door/status_once
+      payload: "OPEN"
+      then:
+        - switch.turn_on: relay
 status_led:
   pin:
...
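The four added lines extend the device's MQTT handler list; stitched together with the surrounding context lines, the resulting on_message section would read roughly as below (the mqtt: wrapper and the exact indentation come from the shared includes and are assumed here):

mqtt:
  on_message:
    - topic: warpzone/door/status
      payload: "CLOSED"
      then:
        - switch.turn_off: relay
    # added handler: the one-shot "door open" status switches the relay back on
    - topic: warpzone/door/status_once
      payload: "OPEN"
      then:
        - switch.turn_on: relay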
@@ -19,6 +19,10 @@
       payload: "CLOSED"
       then:
         - switch.turn_off: relay
+    - topic: warpzone/door/status_once
+      payload: "OPEN"
+      then:
+        - switch.turn_on: relay
 status_led:
   pin:
...
@@ -3,7 +3,7 @@
 {% include "/includes/ansible.inc.yaml" %}
 {% include "/includes/board.esp01_1m.inc.yaml" %}
-comment: Halogen Lounge
+comment: frei
 {% include "/includes/common.inc.yaml" %}
 on_message:
...
@@ -19,6 +19,10 @@
       payload: "CLOSED"
       then:
         - switch.turn_off: relay
+    - topic: warpzone/door/status_once
+      payload: "OPEN"
+      then:
+        - switch.turn_on: relay
 substitutions:
   plug_name: {{ devicename }}
...
@@ -3,7 +3,7 @@
 {% include "/includes/ansible.inc.yaml" %}
 {% include "/includes/board.esp01_1m.inc.yaml" %}
-comment: 3D Drucker Prusanet
+comment: WLED Vorhang
 {% include "/includes/common.inc.yaml" %}
 on_message:
@@ -15,10 +15,10 @@
       payload: "OFF"
       then:
         - switch.turn_off: relay
-    - topic: warpzone/door/status
-      payload: "CLOSED"
+    - topic: warpzone/door/status_once
+      payload: "OPEN"
       then:
-        - switch.turn_off: relay
+        - switch.turn_on: relay
 substitutions:
   plug_name: {{ devicename }}
...
@@ -135,7 +135,7 @@ climate:
     id: pid_heater
     name: "PID Heater Controller"
     sensor: pot_sensor
-    default_target_temperature: 240
+    default_target_temperature: 220
     heat_output: pot_heater
     control_parameters:
       kp: 0.09549
...
@@ -4,7 +4,7 @@ services:
   app:
-    image: grafana/grafana:9.4.3
+    image: grafana/grafana:9.5.6
     restart: always
     volumes:
       - "{{ basedir }}/grafana.ini:/etc/grafana/grafana.ini"
...
version: "3"
services:
  mongodb:
    image: "mongo:6.0.4"
    restart: "always"
  opensearch:
    image: "opensearchproject/opensearch:2.6.0"
    environment:
      - "TZ=Europe/Berlin"
      - "OPENSEARCH_JAVA_OPTS=-Xms1g -Xmx1g"
      - "bootstrap.memory_lock=true"
      - "discovery.type=single-node"
      - "action.auto_create_index=false"
      - "plugins.security.ssl.http.enabled=false"
      - "plugins.security.disabled=true"
    restart: "always"
  graylog:
    hostname: "server"
    image: "graylog/graylog:5.0.5"
    entrypoint: "/usr/bin/tini -- wait-for-it opensearch:9200 -- /docker-entrypoint.sh"
    environment:
      TZ: "Europe/Berlin"
      GRAYLOG_NODE_ID_FILE: "/usr/share/graylog/data/config/node-id"
      GRAYLOG_PASSWORD_SECRET: "warpzonewarpzone"
      GRAYLOG_ROOT_PASSWORD_SHA2: "26230bc6e5e044e6e3cef7c76a2800fdf2d3952ef03e85c83491b99eef149c40"
      GRAYLOG_HTTP_BIND_ADDRESS: "0.0.0.0:9000"
      GRAYLOG_HTTP_EXTERNAL_URI: "http://graylog.warpzone.lan/"
      GRAYLOG_ELASTICSEARCH_HOSTS: "http://opensearch:9200"
      GRAYLOG_MONGODB_URI: "mongodb://mongodb:27017/graylog"
    ports:
      - "514:5140/udp" # Syslog
      - "514:5140/tcp" # Syslog
    restart: "always"
    labels:
      - traefik.enable=true
      - traefik.http.routers.{{ servicename }}.rule=Host(`{{ domain }}`)
      - traefik.http.routers.{{ servicename }}.entrypoints=websecure
      - traefik.http.services.{{ servicename }}.loadbalancer.server.port=9000
    networks:
      - web
      - default
networks:
  web:
    external: true
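With the syslog input exposed on 514/udp and 514/tcp, other stacks on the same host can forward their container logs to this Graylog instance via Docker's syslog logging driver. A minimal sketch; the service name and image are placeholders, graylog.warpzone.lan is taken from GRAYLOG_HTTP_EXTERNAL_URI above and assumed to resolve to this host, and a matching Syslog UDP input still has to be created inside Graylog:

# Hypothetical client stack shipping its logs to the new syslog input
services:
  example_app:
    image: "nginx:1.25"
    restart: "always"
    logging:
      driver: syslog
      options:
        syslog-address: "udp://graylog.warpzone.lan:514"
        tag: "example_app"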
 ---
-- include: ../functions/get_secret.yml
+- include_tasks: ../functions/get_secret.yml
   with_items:
     - { path: "{{ basedir }}/warpai_status_update_key", length: -1 }
     - { path: "{{ basedir }}/homeassistant_admin_password", length: 12 }
...
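The switch from include to include_tasks here (and in the two task files further below) follows the deprecation and eventual removal of the ambiguous include action in newer ansible-core releases; include_tasks is the dynamic replacement and accepts the same loop data. A minimal equivalent using the newer loop keyword, sketched with one item from the hunk above:

- name: Ensure service secrets exist
  ansible.builtin.include_tasks: ../functions/get_secret.yml
  loop:
    - { path: "{{ basedir }}/homeassistant_admin_password", length: 12 }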
@@ -5,8 +5,10 @@ services:
   app:
-    image: homeassistant/home-assistant:2023.3
+    image: homeassistant/home-assistant:2023.6
     restart: always
+    ports:
+      - "{{ int_ip4 }}:{{ homematic_callback_port }}:{{ homematic_callback_port }}"
     volumes:
       - "/etc/localtime:/etc/localtime:ro"
       - "{{ basedir }}/config:/config"
@@ -24,7 +26,7 @@ services:
   influxdb:
-    image: influxdb:2.6.1
+    image: influxdb:2.7.1
     restart: always
     ports:
       - "{{ int_ip4 }}:{{ influxdb_port }}:8086"
...
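Publishing {{ homematic_callback_port }} on the internal IP suggests the Homematic XML-RPC callback, where the CCU pushes state changes back into Home Assistant and therefore needs to reach the container. A hedged sketch of the matching configuration.yaml block, assuming the classic homematic integration; the CCU host name, interface name and XML-RPC port are assumptions, only the callback wiring is the point:

# Hypothetical Home Assistant configuration.yaml excerpt (Ansible-templated)
homematic:
  interfaces:
    rf:
      host: ccu.warpzone.lan                          # address of the CCU (assumed)
      port: 2001                                      # default BidCos-RF XML-RPC port
      callback_ip: "{{ int_ip4 }}"                    # must be reachable from the CCU
      callback_port: "{{ homematic_callback_port }}"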
 ---
-- include: ../functions/get_secret.yml
+- include_tasks: ../functions/get_secret.yml
   with_items:
     - { path: "{{ basedir }}/influxdb_password", length: 12 }
     - { path: "{{ basedir }}/influxdb_token", length: 24 }
...
@@ -9,7 +9,7 @@ version: "3"
 services:
   app:
-    image: nodered/node-red:2.2.3
+    image: nodered/node-red:3.0.2
     restart: always
     volumes:
       - "{{ basedir }}/data:/data"
...
 ---
-- include: ../functions/get_secret.yml
+- include_tasks: ../functions/get_secret.yml
   with_items:
     - { path: "{{ basedir }}/homeassistant_admin_password", length: 12 }
     - { path: "{{ basedir }}/influxdb_password", length: 12 }
...
@@ -5,7 +5,7 @@ services:
   app:
-    image: homeassistant/home-assistant:2023.3.3
+    image: homeassistant/home-assistant:2023.6.1
     restart: always
     volumes:
       - "/etc/localtime:/etc/localtime:ro"
@@ -24,7 +24,7 @@ services:
   influxdb:
-    image: influxdb:2.6.0
+    image: influxdb:2.7.1
     restart: always
     ports:
       - "{{ int_ip4 }}:{{ influxdb_port }}:8086"
...
@@ -5,7 +5,7 @@ services:
   app:
-    image: prom/prometheus:v2.42.0
+    image: prom/prometheus:v2.45.0
     restart: always
     ports:
       - 9090:9090
...