refactor: upgrade ansible, remove clean_desired flag, add separate clean playbook

Yuri Tatishchev 2025-01-05 00:23:23 -08:00
parent 73c370b360
commit c0103496a1
Signed by: CaZzzer
GPG Key ID: E0EBF441EA424369
22 changed files with 73 additions and 75 deletions

View File

@@ -1,19 +1,23 @@
.POSIX:
.PHONY: *
.EXPORT_ALL_VARIABLES:
MAKEFLAGS += -r # disable built-in make rules
env ?= staging
vault_id ?= alpina@contrib/rbw-client.sh
clean_desired ?= false
playbook_cmd := poetry run ansible-playbook --vault-id ${vault_id} -i inventories/${env}
all: site
all: site services
setup:
poetry install --quiet
site: setup
poetry run ansible-playbook --vault-id ${vault_id} -i inventories/${env} --extra-vars "clean_desired_arg=${clean_desired}" site.yml
$(playbook_cmd) site.yml
services: setup
poetry run ansible-playbook --vault-id ${vault_id} -i inventories/${env} services.yml
$(playbook_cmd) services.yml
clean: setup
$(playbook_cmd) clean.yml
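
With the consolidated playbook_cmd variable, an invocation such as make clean env=prod should expand to poetry run ansible-playbook --vault-id alpina@contrib/rbw-client.sh -i inventories/prod clean.yml (given the default vault_id), and the default make target now runs both site.yml and services.yml.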

clean.yml Normal file
View File

@@ -0,0 +1,3 @@
- hosts: alpina
  roles:
    - clean
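
This new playbook just applies the clean role (whose tasks are added later in this commit) to the alpina hosts; it is what the Makefile's clean target invokes.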

View File

@@ -5,6 +5,12 @@ alpina_svc_path: ~/alpina
base_volume_path: /mnt/dock
media_volume_path: /mnt/media
docker_ipv6_subnet: "{{ \
  ansible_default_ipv6.address \
  | ansible.utils.ipsubnet(64) \
  | ansible.utils.ipsubnet(72, docker_ipv6_index) \
  }}"
# Authentik
authentik_db_password: "{{ vault_authentik_db_password }}"
authentik_secret_key: "{{ vault_authentik_secret_key }}"
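
This moves the Docker IPv6 subnet calculation into group_vars (the docker_host role's set_fact for it is removed further down). A rough worked illustration with a made-up address, assuming ipsubnet's index argument is 0-based as the ansible.utils docs describe:

# ansible_default_ipv6.address: 2001:db8:abcd:12::1            (hypothetical)
#   | ansible.utils.ipsubnet(64)      -> 2001:db8:abcd:12::/64        (the host's /64)
#   | ansible.utils.ipsubnet(72, 255) -> 2001:db8:abcd:12:ff00::/72   (prod, docker_ipv6_index: 255)
#   | ansible.utils.ipsubnet(72, 254) -> 2001:db8:abcd:12:fe00::/72   (staging, docker_ipv6_index: 254)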

View File

@@ -1 +0,0 @@
domain: cazzzer.com

View File

@@ -1,6 +1,8 @@
# Environment specific variables (prod)
---
domain: cazzzer.com
docker_ipv6_index: 255
# Arrstack VPN

View File

@@ -1 +0,0 @@
domain: lab.cazzzer.com

View File

@@ -1,6 +1,8 @@
# Environment specific variables (staging)
---
domain: lab.cazzzer.com
docker_ipv6_index: 254
# Arrstack VPN

poetry.lock generated
View File

@@ -1,28 +1,28 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "ansible"
version = "10.7.0"
version = "11.1.0"
description = "Radically simple IT automation"
optional = false
python-versions = ">=3.10"
python-versions = ">=3.11"
files = [
{file = "ansible-10.7.0-py3-none-any.whl", hash = "sha256:0089f08e047ceb70edd011be009f5c6273add613fbe491e9697c0556c989d8ea"},
{file = "ansible-10.7.0.tar.gz", hash = "sha256:59d29e3de1080e740dfa974517d455217601b16d16880314d9be26145c68dc22"},
{file = "ansible-11.1.0-py3-none-any.whl", hash = "sha256:bbaf7073993f019fc0293fc8b76c7b215081831957c28eb020f12c270a16e8f0"},
{file = "ansible-11.1.0.tar.gz", hash = "sha256:d01b425990d960d2a33fc378e1b73dbca1c0e28bc22f4056ab6b3c8e9ae74fba"},
]
[package.dependencies]
ansible-core = ">=2.17.7,<2.18.0"
ansible-core = ">=2.18.1,<2.19.0"
[[package]]
name = "ansible-core"
version = "2.17.7"
version = "2.18.1"
description = "Radically simple IT automation"
optional = false
python-versions = ">=3.10"
python-versions = ">=3.11"
files = [
{file = "ansible_core-2.17.7-py3-none-any.whl", hash = "sha256:64d4f0a006687a5621aa80dca54fd0c5ae75145b7aac8c1b8d7f07a1399c4705"},
{file = "ansible_core-2.17.7.tar.gz", hash = "sha256:3aaab735d6c4e2d6239bc326800dc0ecda2a1490caa8455b41084ec0bc54dacf"},
{file = "ansible_core-2.18.1-py3-none-any.whl", hash = "sha256:4a312e416e09c7271188d6b8e2b1062fc6834fefd6a1814d0e02fb8aadb3e1ba"},
{file = "ansible_core-2.18.1.tar.gz", hash = "sha256:14cac1f92bbdae881cb0616eddeb17925e8cb507e486087975e724533d9de74f"},
]
[package.dependencies]
@@ -418,25 +418,25 @@ test = ["commentjson", "packaging", "pytest"]
[[package]]
name = "setuptools"
version = "75.6.0"
version = "75.7.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.9"
files = [
{file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"},
{file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"},
{file = "setuptools-75.7.0-py3-none-any.whl", hash = "sha256:84fb203f278ebcf5cd08f97d3fb96d3fbed4b629d500b29ad60d11e00769b183"},
{file = "setuptools-75.7.0.tar.gz", hash = "sha256:886ff7b16cd342f1d1defc16fc98c9ce3fde69e087a4e1983d7ab634e5f41f4f"},
]
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "334448cb0c7d192f0e10987a995ecefca5e136733cce4dd15dcc2238f1c371c8"
python-versions = "^3.11"
content-hash = "7c5b28e1b7fc5cf1c55fedf89a01f26e9246b9d1baa1441d51a8693697b6767a"

View File

@@ -6,8 +6,8 @@ authors = ["Iurii Tatishchev <itatishch@gmail.com>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
ansible = "^10.1.0"
python = "^3.11"
ansible = "^11.1.0"
ansible-vault = "^2.1.0"
netaddr = "^1.3.0"

View File

@@ -9,6 +9,8 @@ services:
container_name: gluetun
cap_add:
- NET_ADMIN
devices:
- /dev/net/tun:/dev/net/tun
sysctls:
- net.ipv6.conf.all.disable_ipv6=0
env_file:

View File

@@ -15,7 +15,8 @@ services:
restart: unless-stopped
# Port forward is needed because traefik can't resolve the container name from the host network
ports:
- "9000:9000"
- "127.0.0.1:9000:9000"
- "[::1]:9000:9000"
command: server
env_file:
- .env.authentik

View File

@@ -31,7 +31,8 @@ services:
- -config.file=/etc/loki/loki-config.yaml
# Port forward is needed because the container name can't be resolved from the host network
ports:
- 3100:3100
- "127.0.0.1:3100:3100"
- "[::1]:3100:3100"
volumes:
- {{ base_volume_path }}/monitoring/loki:/loki
- ./loki_config:/etc/loki:ro

View File

@@ -0,0 +1,22 @@
- name: Get list of running Docker containers
  docker_host_info:
    containers: yes
  register: docker_container_list

- name: Stop all running Docker containers
  docker_container:
    name: "{{ item }}"
    state: stopped
  loop: "{{ docker_container_list.containers | map(attribute='Id') | list }}"
  async: 300
  poll: 0

- name: Prune all Docker containers and networks
  docker_prune:
    containers: yes
    networks: yes

- name: Clean alpina directory
  file:
    path: "{{ alpina_svc_path }}"
    state: absent
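
The stop tasks are launched asynchronously (async: 300 with poll: 0), so the prune task that follows does not wait for them to finish. If that ordering ever matters, one option is to register the async jobs and poll them before pruning; a minimal sketch, assuming the modules come from the community.docker collection and using illustrative names (stop_jobs, stop_status):

- name: Stop all running Docker containers
  community.docker.docker_container:
    name: "{{ item }}"
    state: stopped
  loop: "{{ docker_container_list.containers | map(attribute='Id') | list }}"
  async: 300
  poll: 0
  register: stop_jobs

- name: Wait for the stop jobs to complete
  ansible.builtin.async_status:
    jid: "{{ item.ansible_job_id }}"
  loop: "{{ stop_jobs.results }}"
  register: stop_status
  until: stop_status.finished
  retries: 30
  delay: 10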

View File

@@ -1,12 +1,5 @@
- name: Get IPv6 subnet for Docker
set_fact:
docker_ipv6_subnet: "{{ \
ansible_default_ipv6.address \
| ansible.utils.ipsubnet(64) \
| ansible.utils.ipsubnet(72, docker_ipv6_index) \
}}"
- debug:
- name: IPv6 subnet for Docker
debug:
var: docker_ipv6_subnet
- name: Configure Docker daemon
@@ -35,33 +28,6 @@
state: disabled
register: docker0_firewalld
- name: Get list of running Docker containers
docker_host_info:
containers: yes
register: docker_container_list
when: clean_desired is true
- name: Stop all running Docker containers
docker_container:
name: "{{ item }}"
state: stopped
loop: "{{ docker_container_list.containers | map(attribute='Id') | list }}"
async: 300
poll: 0
when: clean_desired is true and docker_container_list.containers | length > 0
- name: Prune all Docker containers and networks
docker_prune:
containers: yes
networks: yes
when: clean_desired is true
- name: Clean alpina directory
file:
path: "{{ alpina_svc_path }}"
state: absent
when: clean_desired is true
- name: Restart Docker daemon
become: yes
service:

View File

@@ -1,6 +1,5 @@
- hosts: alpina
roles:
- docker_host
- alpina
post_tasks:
- name: Docker prune objects

View File

@@ -1,12 +1,4 @@
- hosts: all
- hosts: alpina
roles:
- common
pre_tasks:
- name: Set fact for clean desired of docker objects and compose files
set_fact:
# clean_desired_arg is an extra variable passed to the playbook
clean_desired: "{{ clean_desired_arg | bool }}"
- name: Install services
import_playbook: services.yml
- docker_host
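
Reassembled from the hunk above, the resulting playbook (presumably site.yml, given the removed import of services.yml) should reduce to a single play:

- hosts: alpina
  roles:
    - common
    - docker_host

With the services import gone, services.yml is applied separately via the Makefile's default target (all: site services).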