refactor: move to Debian on truenas scale

Iurii Tatishchev 2023-03-25 04:28:41 -07:00
parent a6c370b85a
commit ed426593d4
Signed by: CaZzzer
GPG Key ID: 9A156B7DA6398968
26 changed files with 251 additions and 158 deletions

.idea/alpina.iml (generated): 2 lines changed

@@ -26,6 +26,8 @@
<option value="$MODULE_DIR$/roles/traefik/templates" />
<option value="$MODULE_DIR$/roles/gitea/templates" />
<option value="$MODULE_DIR$/roles/nextcloud/templates" />
<option value="$MODULE_DIR$/roles/arrstack/templates" />
<option value="$MODULE_DIR$/roles/jellyfin/templates" />
</list>
</option>
</component>

.idea/jsonSchemas.xml (generated): 16 lines changed

@@ -37,6 +37,22 @@
</SchemaInfo>
</value>
</entry>
<entry key="docker-compose.yml">
<value>
<SchemaInfo>
<option name="name" value="docker-compose.yml" />
<option name="relativePathToSchema" value="https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json" />
<option name="applicationDefined" value="true" />
<option name="patterns">
<list>
<Item>
<option name="path" value="roles/traefik/templates/docker-compose.yml.j2" />
</Item>
</list>
</option>
</SchemaInfo>
</value>
</entry>
</map>
</state>
</component>


@@ -6,7 +6,7 @@ vault_id ?= alpina@contrib/rbw-client.sh
all: site
setup:
poetry install
poetry install --quiet
site: setup
poetry run ansible-playbook --vault-id ${vault_id} -i inventories/${env} site.yml


@@ -1,3 +1,4 @@
# Alpina
A home for configuring all of my homelab containers on an Alpine Linux machine.
A home for configuring all of my homelab containers on a Debian Linux machine.
This assumes a Debian Linux machine with Docker and Docker Compose installed.


@@ -4,13 +4,13 @@
file:
path: "{{ current_svc_path }}"
state: directory
mode: "500"
mode: "700"
- name: Ensure directory structure exists
file:
path: "{{ current_svc_path }}/{{ item.path }}"
state: directory
mode: "500"
mode: "700"
with_community.general.filetree: "{{ templates_source }}"
when: item.state == "directory"
@@ -23,7 +23,7 @@
template:
src: "{{ item.src }}"
dest: "{{ current_svc_path }}/{{ item.path | regex_replace('\\.j2$', '') }}"
mode: "400"
mode: "600"
with_community.general.filetree: "{{ templates_source }}"
when: item.state == "file"
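Both tasks above walk the same community.general filetree lookup: directories under templates_source are created with mode 0700, and each *.j2 file is rendered to the same relative path with the .j2 suffix stripped, mode 0600. A quick way to inspect what the lookup yields, as a minimal sketch (not part of the commit) reusing the same templates_source variable:

# Minimal sketch: each filetree item exposes .state ("file" or "directory"),
# .path (relative to the tree root) and .src (absolute source path).
- name: Show the filetree items the tasks above will process
  ansible.builtin.debug:
    msg: "{{ item.state }}: {{ item.path }}"
  with_community.general.filetree: "{{ templates_source }}"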


@@ -1 +1,10 @@
domain: cazzzer.com
wg_privkey: !vault |
$ANSIBLE_VAULT;1.2;AES256;alpina
61346533346138643038616365373264333063626539316266326164353935666464346534643433
3634353332373937323464346634643639623039366163350a666161323932633866633264303034
32303833613236316463643066363565333536323833373562343832333435303732626264353337
3831353935663865390a383335333133613039386237653665653663346666626666616439323530
33626333383830383430313765386439323738336336333234303738383837356135353635366365
3066313962653537376430613963316132613663356665316238


@@ -1,2 +1,2 @@
[docker_hosts]
root@alpina.lab.home
debbi.lab.home


@@ -1 +1,10 @@
domain: lab.cazzzer.com
wg_privkey: !vault |
$ANSIBLE_VAULT;1.2;AES256;alpina
66323965396438656630376232373462616536303233663163373933306261396634623164653536
3964323735386530303932616135346461353036393635350a353434303730633265343035623434
35323064373733373436383939386335306463316634363436396264313432363961353766633930
3662633131636332620a313334396161386230303936646566363162643831393965376563386432
37613538613466353266666566373836663037363139316463313335633335633536613232323062
3765366135356362326138313636646263646235656333386132


@@ -1,2 +1,2 @@
[docker_hosts]
root@etapp.lab.home
etappi.lab.home

poetry.lock (generated): 40 lines changed

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
[[package]]
name = "ansible"
@@ -34,6 +34,25 @@ packaging = "*"
PyYAML = ">=5.1"
resolvelib = ">=0.5.3,<0.9.0"
[[package]]
name = "ansible-vault"
version = "2.1.0"
description = "R/W an ansible-vault yaml file"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "ansible-vault-2.1.0.tar.gz", hash = "sha256:5ce8fdb5470f1449b76bf07ae2abc56480dad48356ae405c85b686efb64dbd5e"},
]
[package.dependencies]
ansible = "*"
setuptools = "*"
[package.extras]
dev = ["black", "flake8", "isort[pyproject]", "pytest"]
release = ["twine"]
[[package]]
name = "cffi"
version = "1.15.1"
@@ -327,7 +346,24 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"]
release = ["build", "towncrier", "twine"]
test = ["commentjson", "packaging", "pytest"]
[[package]]
name = "setuptools"
version = "67.6.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
{file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "fdbbbb1f3c9578e994fc064c9036557f370fe659bb84e4a5cf5ad29247968141"
content-hash = "4c3656f66006d184debf3777b8df073898df0eb1f53611cdd47ec4c543071595"


@@ -8,6 +8,7 @@ readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
ansible = "^7.3.0"
ansible-vault = "^2.1.0"
[build-system]


@@ -0,0 +1,23 @@
#!/usr/bin/env bash
apk add --no-cache wireguard-tools-wg
local_gateway=$(ip route | grep default | awk '{print $3}')
# This is used as the gateway address for NAT-PMP to work properly
wg_gateway="{{ wg_dns }}"
wg_peer_address=$(echo "{{ wg_peer_endpoint }}" | cut -d: -f1)
ip route add "$wg_peer_address" via "$local_gateway"
ip link add wg0 type wireguard
wg setconf wg0 /etc/wireguard/wg0.conf
ip address add dev wg0 "{{ wg_address }}"
ip link set wg0 up
ip route add "$wg_gateway" dev wg0
ip route del default
ip route add default via "$wg_gateway"
# Note that the DNS isn't changed, so there's actually a leak there
# That's on purpose, just in case I want to access local jackett from qbit
# Finally, optionally allow access to the home network
# ip route add "\{\{ home_network }}" via "$local_gateway"
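The init script (and the wg0.conf.j2 template further down) pulls its settings from a few role variables; the real values are vaulted, so the following is only an illustrative sketch with placeholder values:

# Placeholder values only; the real definitions are encrypted in the role's
# vaulted vars and are not shown in this diff.
wg_address: 10.2.0.2/32              # hypothetical tunnel address
wg_dns: 10.2.0.1                     # provider DNS, reused as the in-tunnel gateway
wg_peer_pubkey: "PEER_PUBLIC_KEY_BASE64="
wg_peer_endpoint: vpn.example.net:51820
# home_network: 192.168.0.0/16       # only needed if the last route is enabled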


@@ -0,0 +1,82 @@
version: "3.7"
networks:
default:
traefik_traefik:
external: true
services:
qbittorrent:
image: linuxserver/qbittorrent:latest
container_name: qbittorrent
cap_add:
- NET_ADMIN
labels:
- traefik.enable=true
- traefik.http.routers.qbittorrent.rule=Host(`qbit.{{ domain }}`)
- traefik.http.services.qbittorrent.loadbalancer.server.port=8080
restart: unless-stopped
networks:
- default
- traefik_traefik
volumes:
- ./wireguard:/etc/wireguard:ro
- ./custom-init:/custom-cont-init.d:ro
- {{ base_volume_path }}/arrstack/config/qbittorrent:/config
- {{ base_volume_path }}/arrstack/downloads:/downloads
- {{ media_volume_path }}/Plex:/media/Plex
- {{ media_volume_path }}/iso-img:/media/iso-img
jackett:
image: linuxserver/jackett:latest
container_name: jackett
labels:
- traefik.enable=true
- traefik.http.routers.jackett.rule=Host(`jackett.{{ domain }}`)
- traefik.http.services.jackett.loadbalancer.server.port=9117
restart: unless-stopped
networks:
- default
- traefik_traefik
volumes:
- {{ base_volume_path }}/arrstack/config/jackett:/config
- {{ base_volume_path }}/arrstack/downloads:/downloads
- {{ media_volume_path }}/Plex:/media/Plex
sonarr:
image: linuxserver/sonarr:latest
container_name: sonarr
labels:
- traefik.enable=true
- traefik.http.routers.sonarr.rule=Host(`sonarr.{{ domain }}`)
- traefik.http.services.sonarr.loadbalancer.server.port=8989
restart: unless-stopped
depends_on:
- qbittorrent
- jackett
networks:
- default
- traefik_traefik
volumes:
- {{ base_volume_path }}/arrstack/config/sonarr:/config
- {{ base_volume_path }}/arrstack/downloads:/downloads
- {{ media_volume_path }}/Plex:/media/Plex
radarr:
image: linuxserver/radarr:latest
container_name: radarr
labels:
- traefik.enable=true
- traefik.http.routers.radarr.rule=Host(`radarr.{{ domain }}`)
- traefik.http.services.radarr.loadbalancer.server.port=7878
restart: unless-stopped
depends_on:
- qbittorrent
- jackett
networks:
- default
- traefik_traefik
volumes:
- {{ base_volume_path }}/arrstack/config/radarr:/config
- {{ base_volume_path }}/arrstack/downloads:/downloads
- {{ media_volume_path }}/Plex:/media/Plex
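These compose templates hang all of their bind mounts off two path variables whose definitions sit outside the hunks shown here; a placeholder sketch of what they might look like:

# Hypothetical values; the real definitions are not part of this commit view.
base_volume_path: /mnt/apps        # per-service config and download data
media_volume_path: /mnt/media      # TrueNAS SCALE media datasets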


@@ -0,0 +1,11 @@
# Stripped version of the wg config
[Interface]
PrivateKey = {{ wg_privkey }}
# Address = {{ wg_address }}
# DNS = {{ wg_dns }} # This is also used as the gateway address for NAT-PMP to work properly
[Peer]
PublicKey = {{ wg_peer_pubkey }}
AllowedIPs = 0.0.0.0/0
Endpoint = {{ wg_peer_endpoint }}


@@ -0,0 +1,12 @@
$ANSIBLE_VAULT;1.2;AES256;alpina
37653839366635373530306432303538626233356164633761316231623732316138643532383735
3132613432333636383363383162643434626638613234320a343337333435393461323735646338
34353764366561633738383933626261643734343266333364353162366161313738663064656530
6666313731343663650a343761646664356238373763383136366431383337313065613663303233
36613233653666306338373839623130323833393932386161353933613338613836326632653262
31646131646637646237373964376365336337386639396266393731623761393038396233663663
32393964313361326463356435343064643964343731386238643263653738356534383536353330
32376162376235663636626562646436613265656461656133643762396137313238383533653831
31396632656630626138326335363462383131343431336264656236346665366236353863326237
66653064653166373838653631653563303834303334633830383064323965393563663563636361
653139663339346331336435313263343936


@@ -1,5 +1,5 @@
---
current_svc_name: jackett
current_svc_name: arrstack
current_svc_path: "{{ my_svc_path }}/{{ current_svc_name }}"
templates_source: "{{ role_path }}/templates"


@@ -1,49 +1,14 @@
- name: Upgrade alpine packages
community.general.apk:
upgrade: yes
- name: Upgrade Debian packages
become: yes
ansible.builtin.apt:
upgrade: dist
update_cache: yes
register: apk_upgrades
- name: Install alpine packages
community.general.apk:
name:
- qemu-guest-agent
- dhcpcd
- python3
- fish
- docker
- docker-compose
- docker-fish-completion
- docker-compose-fish-completion
- zfs
- zfs-utils-py
cache_valid_time: 3600
autoremove: yes
state: latest
update_cache: yes
register: apk_installs
- name: Enable qemu-guest-agent service
service:
name: qemu-guest-agent
runlevel: boot
enabled: yes
- name: Enable zfs-import service
service:
name: zfs-import
runlevel: sysinit
enabled: yes
- name: Enable zfs-mount service
service:
name: zfs-mount
runlevel: sysinit
enabled: yes
- name: Enable docker service
service:
name: docker
enabled: yes
register: apt_upgrades
- name: Reboot if needed
reboot:
when: apk_upgrades.changed or apk_installs.changed
become: yes
ansible.builtin.reboot:
when: apt_upgrades.changed
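The reboot is now keyed off apt_upgrades.changed, so any changed upgrade triggers one. A narrower variant (a sketch, not what the commit does) would check Debian's reboot-required marker instead:

# Variant sketch: only reboot when Debian has flagged that a reboot is
# actually required by an upgraded package.
- name: Check for Debian's reboot-required marker
  ansible.builtin.stat:
    path: /var/run/reboot-required
  register: reboot_required

- name: Reboot if needed
  become: yes
  ansible.builtin.reboot:
  when: reboot_required.stat.exists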


@@ -1,14 +1,10 @@
version: "3.9"
version: "3.7"
networks:
default:
traefik_traefik:
external: true
volumes:
gitea:
postgres:
services:
server:
image: gitea/gitea:1.18
@@ -24,7 +20,7 @@ services:
- default
- traefik_traefik
volumes:
- gitea:/data
- {{ base_volume_path }}/gitea/gitea:/data
depends_on:
- db
db:
@@ -36,4 +32,4 @@ services:
networks:
- default
volumes:
- postgres:/var/lib/postgresql/data
- {{ base_volume_path }}/gitea/postgres:/var/lib/postgresql/data


@@ -1,26 +0,0 @@
version: "3.9"
networks:
default:
traefik_traefik:
external: true
volumes:
config:
torrent_blackhole:
services:
jackett:
image: linuxserver/jackett:latest
container_name: jackett
labels:
- traefik.enable=true
- traefik.http.routers.jackett.rule=Host(`jackett.{{ domain }}`)
- traefik.http.services.jackett.loadbalancer.server.port=9117
restart: unless-stopped
networks:
- default
- traefik_traefik
volumes:
- config:/config
- torrent_blackhole:/downloads


@@ -1,26 +1,10 @@
version: "3.9"
version: "3.7"
networks:
default:
traefik_traefik:
external: true
volumes:
config:
cache:
media:
driver: local
driver_opts:
type: nfs
o: "addr=truenas.lab.home,nfsvers=4,ro,noatime"
device: ":/mnt/Mass-Storage-New/JailStorage/Plex/media"
other_videos:
driver: local
driver_opts:
type: nfs
o: "addr=truenas.lab.home,nfsvers=4,ro,noatime"
device: ":/mnt/Mass-Storage-New/syncThingData/Videos"
services:
jellyfin:
image: jellyfin/jellyfin:10.8.6
@@ -36,10 +20,10 @@ services:
- default
- traefik_traefik
volumes:
- config:/config
- cache:/cache
- media:/data/media
- other_videos:/data/other_videos
- {{ base_volume_path }}/jellyfin/config:/config
- {{ base_volume_path }}/jellyfin/cache:/cache
- {{ media_volume_path }}/Plex/media:/data/media:ro
- {{ media_volume_path }}/other_videos:/data/other_videos:ro
tmpfs:
- /tmp/transcodes
devices:
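The NFS-backed named volumes are dropped in favour of host paths under media_volume_path, presumably because the Docker host now runs on the TrueNAS SCALE box itself. If the media were ever remote again, the mount would have to exist host-side now; a sketch using ansible.posix.mount (an assumption, not part of this commit):

# Sketch only: host-side NFS mount standing in for the removed driver_opts
# volume, for a Docker host that does not hold the datasets locally.
- name: Mount the Plex media share on the Docker host
  become: yes
  ansible.posix.mount:
    src: "truenas.lab.home:/mnt/Mass-Storage-New/JailStorage/Plex"
    path: "{{ media_volume_path }}/Plex"
    fstype: nfs
    opts: ro,noatime,nfsvers=4
    state: mounted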


@@ -1,15 +1,10 @@
version: "3.9"
version: "3.7"
networks:
default:
traefik_traefik:
external: true
volumes:
nextcloud:
nextcloud_config:
nextcloud_data:
db:
services:
app:
@@ -24,9 +19,9 @@ services:
networks:
- default
volumes:
- nextcloud:/var/www/html
- nextcloud_config:/var/www/html/config
- nextcloud_data:/var/www/html/data
- {{ base_volume_path }}/nextcloud/nextcloud:/var/www/html
- {{ base_volume_path }}/nextcloud/nextcloud_config:/var/www/html/config
- {{ base_volume_path }}/nextcloud/nextcloud_data:/var/www/html/data
cron:
image: nextcloud:${NEXTCLOUD_VERSION}
@@ -37,8 +32,10 @@ services:
entrypoint: /cron.sh
networks:
- default
volumes_from:
- app
volumes:
- {{ base_volume_path }}/nextcloud/nextcloud:/var/www/html
- {{ base_volume_path }}/nextcloud/nextcloud_config:/var/www/html/config
- {{ base_volume_path }}/nextcloud/nextcloud_data:/var/www/html/data
notify_push:
image: nextcloud:${NEXTCLOUD_VERSION}
@@ -51,8 +48,10 @@ services:
- /var/www/html/config/config.php
networks:
- default
volumes_from:
- app
volumes:
- {{ base_volume_path }}/nextcloud/nextcloud:/var/www/html
- {{ base_volume_path }}/nextcloud/nextcloud_config:/var/www/html/config
- {{ base_volume_path }}/nextcloud/nextcloud_data:/var/www/html/data
db:
image: postgres:13-alpine
@@ -63,7 +62,7 @@ services:
networks:
- default
volumes:
- db:/var/lib/postgresql/data
- {{ base_volume_path }}/nextcloud/db:/var/lib/postgresql/data
redis:
image: redis:7-alpine
@@ -91,6 +90,7 @@ services:
- traefik_traefik
- default
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf
volumes_from:
- app
- ./nginx.conf:/etc/nginx/nginx.conf:ro
- {{ base_volume_path }}/nextcloud/nextcloud:/var/www/html
- {{ base_volume_path }}/nextcloud/nextcloud_config:/var/www/html/config
- {{ base_volume_path }}/nextcloud/nextcloud_data:/var/www/html/data
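With volumes_from gone from compose file format v3, each Nextcloud service now repeats the same three bind mounts. An optional way to keep them in one place (a sketch, not what the commit does) is a YAML anchor in an x- extension field of the same template:

# Optional de-duplication sketch: declare the shared mounts once and reuse
# them per service with a YAML anchor.
x-nextcloud-volumes: &nextcloud_volumes
  - {{ base_volume_path }}/nextcloud/nextcloud:/var/www/html
  - {{ base_volume_path }}/nextcloud/nextcloud_config:/var/www/html/config
  - {{ base_volume_path }}/nextcloud/nextcloud_data:/var/www/html/data

services:
  cron:
    volumes: *nextcloud_volumes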


@@ -1,4 +1,4 @@
version: "3.9"
version: "3.7"
networks:
default:
@@ -8,9 +8,6 @@ networks:
config:
- subnet: {{ traefik_ip }}/24
volumes:
logs:
services:
traefik:
image: traefik:v2.9
@@ -32,5 +29,5 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
- ./traefik.yml:/etc/traefik/traefik.yml:ro
- logs:/logs
- {{ current_svc_path }}/rules:/rules:ro
- {{ base_volume_path }}/traefik/logs:/logs
- ./rules:/rules:ro


@@ -1,25 +0,0 @@
http:
routers:
sonarr:
rule: "Host(`sonarr.{{ domain }}`)"
service: sonarr
radarr:
rule: "Host(`radarr.{{ domain }}`)"
service: radarr
serversTransports:
sonarrTransport:
insecureSkipVerify: true
forwardingTimeouts:
dialTimeout: "180s"
services:
sonarr:
loadBalancer:
servers:
- url: "http://sonarr.lab.home:8989"
serversTransport: "sonarrTransport"
radarr:
loadBalancer:
servers:
- url: "http://radarr.lab.home:7878"
serversTransport: "sonarrTransport"


@@ -5,7 +5,7 @@
- nextcloud
- jellyfin
- gitea
- jackett
- arrstack
post_tasks:
- name: Docker prune objects
docker_prune:
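The prune options themselves sit below the displayed hunk; for reference, a typical community.docker.docker_prune invocation (placeholder options, not necessarily what site.yml uses) looks like:

# Placeholder options; the actual flags in site.yml are outside the shown hunk.
- name: Docker prune objects
  community.docker.docker_prune:
    containers: true
    images: true
    networks: true
    builder_cache: true
    volumes: false      # keep named and bind-mounted data intact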