Compare commits

...

15 Commits

Author SHA1 Message Date
il f28661e664 refactor(convention): add existing 'site' tag to convention 2026-05-15 10:16:36 +09:00
il a09712c142 refactor(script): update edit_secret.sh to optimize log print logic 2026-05-15 10:12:42 +09:00
il a7e2320b21 chore(script): archive a extract_secret.sh script
archived stack: extract_secret.sh
2026-05-15 09:19:59 +09:00
il 24c83029e9 refactor(playbook): update convention and remove deprecated tag
update notes:
- remove tags 'update', 'upgrade' from convention.yaml
- remove tags 'update' from playbooks/app/site.yaml
2026-05-15 09:04:51 +09:00
il ac64b3c04e docs(readme): add RPO on readme 2026-05-13 17:12:59 +09:00
il 26d696f813 refactor(all): update hardcoded internal domain to ansible variable 2026-05-12 08:08:04 +09:00
il 1096981ef2 feat(paperless): change paperless OCR engine model from tesseract_fast to tesseract_best 2026-05-12 08:00:37 +09:00
il e1936b494d fix(crowdsec): update whitelist.yaml to prevent false positive
false positive:
- nextcloud chunk problem (crowdsecurity/http-crawl-non_statics)
- change expression 'chunks.mjs' to 'chunk.mjs'
2026-05-11 19:40:50 +09:00
il 0afc841b69 chore(chromium): archive a removed stack from console
archived stack: chromium
2026-05-11 19:37:25 +09:00
il a39122eb4b fix(crowdsec): update whitelist.yaml to prevent false positive
false positive:
- nextcloud chunk problem (crowdsecurity/http-crawl-non_statics)
2026-05-11 19:34:22 +09:00
il 0f4da0bb53 feat(backup): add archiving of runtime binary packages 2026-05-11 01:37:15 +09:00
il 1dd1c53e2a feat(backup): add archiving of deployed container images 2026-05-11 00:52:28 +09:00
il 530407c162 refactor(all): update hardcoded timezone 'Asia/Seoul' to ansible variable 'timezone' 2026-05-10 18:44:28 +09:00
il 11ab2f5205 fix(sure): correct task name and subuid variable reference 2026-05-10 14:39:54 +09:00
il 4527e39d0f chore(app): archive removed stacks from app
archived stacks:
- actual-budget
- ezbookkeeping
- opencloud
- trilium
- vikunja
- wikijs
2026-05-10 00:07:51 +09:00
105 changed files with 962 additions and 647 deletions
+2
View File
@@ -2,6 +2,8 @@
data/bin/*
data/volumes/*
data/images/*
!data/images/containers
data/images/containers/*
docs/archives/textfiles/
docs/notes/*
*.sql
+10 -1
View File
@@ -2,7 +2,16 @@
This homelab project implements a single-node on-premise IaaS system. The homelab contains virtual machines that are divided by their roles, such as private firewall, DNS, PKI, LDAP and database, and SSO (OIDC). A standard domain is used to implement this system without specific vendors. All components are defined as code and initiated by IaC (Ansible), except the initial hypervisor configuration.
## RTO times
## RTO and RPO
### RPO
- Each backup guarantees a 24-hour RPO
- DB dumps are backed up at 12:00 AM
- Stateful data in app vm is backed up at 03:00 AM
- The maximum inconsistency window between DB dumps and stateful data can be 27 hours.
- This is caused by the different backup times of the two jobs.
### RTO
- Feb/25/2026 - Reprovisioning Hypervisor and vms
- RTO: 1 hour 30 min - verified
- Manual install and set vmm: 20 min
+2 -3
View File
@@ -33,7 +33,6 @@
tags:
- "always"
- "init"
- "upgrade"
- "update"
- "site"
- "[service_name]"
# when: "'tags' is not in ansible_run_tags"
+1 -48
View File
@@ -1,6 +1,7 @@
---
# Global vars
ansible_ssh_private_key_file: "/etc/secrets/{{ hostvars['console']['node']['uid'] }}/id_console"
timezone: "Asia/Seoul"
# CA
root_cert_filename: "ilnmors_root_ca.crt"
@@ -109,13 +110,6 @@ services:
immich-ml:
ports:
http: "3003"
actualbudget:
domain:
public: "actualbudget"
internal: "actualbudget.app"
ports:
http: "5006"
subuid: "101000"
paperless:
domain:
public: "paperless"
@@ -124,20 +118,6 @@ services:
http: "8001"
redis: "6380"
subuid: "100999"
vikunja:
domain:
public: "vikunja"
internal: "vikunja.app"
ports:
http: "3456"
subuid: "100999"
opencloud:
domain:
public: "opencloud"
internal: "opencloud.app"
ports:
http: "9200"
subuid: "100999"
manticore:
subuid: "100998"
affine:
@@ -163,13 +143,6 @@ services:
ports:
http: "9980"
subuid: "101000"
ezbookkeeping:
domain:
public: "budget"
internal: "budget.app"
ports:
http: "8003"
subuid: "100999"
sure:
domain:
public: "sure"
@@ -178,20 +151,6 @@ services:
http: "3001"
redis: "6383"
subuid: "100999"
wikijs:
domain:
public: "wiki"
internal: "wiki.app"
ports:
http: "3002"
subuid: "100999"
trilium:
domain:
public: "notes"
internal: "notes.app"
ports:
http: "8004"
subuid: "100999"
version:
packages:
@@ -222,15 +181,9 @@ version:
gitea: "1.26.1"
redis: "8.6.3"
immich: "v2.7.5"
actualbudget: "26.3.0"
paperless: "2.20.15"
vikunja: "2.2.2"
opencloud: "4.0.6"
manticore: "25.0.0"
affine: "0.26.3"
nextcloud: "33.0.3"
collabora: "25.04.9.4.1"
ezbookkeeping: "1.4.0"
sure: "0.7.0-hotfix.2"
wikijs: "2.5.314"
trilium: "v0.102.2"
+4 -52
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
@@ -142,8 +142,8 @@
name: "common"
tasks_from: "services/set_alloy"
apply:
tags: ["init", "update", "alloy"]
tags: ["init", "update", "alloy"]
tags: ["init", "alloy"]
tags: ["init", "alloy"]
- name: Set kopia
ansible.builtin.include_role:
@@ -185,14 +185,6 @@
tags: ["site", "immich"]
tags: ["site", "immich"]
- name: Set actual budget
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_actual-budget"
apply:
tags: ["site", "actual-budget"]
tags: ["site", "actual-budget"]
- name: Set paperless
ansible.builtin.include_role:
name: "app"
@@ -201,22 +193,6 @@
tags: ["site", "paperless"]
tags: ["site", "paperless"]
- name: Set vikunja
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_vikunja"
apply:
tags: ["site", "vikunja"]
tags: ["site", "vikunja"]
- name: Set opencloud
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_opencloud"
apply:
tags: ["site", "opencloud"]
tags: ["site", "opencloud"]
- name: Set affine
ansible.builtin.include_role:
name: "app"
@@ -241,14 +217,6 @@
tags: ["site", "collabora"]
tags: ["site", "collabora"]
- name: Set ezbookkeeping
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_ezbookkeeping"
apply:
tags: ["site", "ezbookkeeping"]
tags: ["site", "ezbookkeeping"]
- name: Set sure
ansible.builtin.include_role:
name: "app"
@@ -257,22 +225,6 @@
tags: ["site", "sure"]
tags: ["site", "sure"]
- name: Set wiki.js
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_wikijs"
apply:
tags: ["site", "wikijs"]
tags: ["site", "wikijs"]
- name: Set trilium
ansible.builtin.include_role:
name: "app"
tasks_from: "services/set_trilium"
apply:
tags: ["site", "trilium"]
tags: ["site", "trilium"]
- name: Flush handlers right now
ansible.builtin.meta: "flush_handlers"
+2 -2
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+2 -2
View File
@@ -24,9 +24,9 @@
tasks:
# init
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+4 -4
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
@@ -162,8 +162,8 @@
name: "fw"
tasks_from: "services/set_bind"
apply:
tags: ["init", "update", "bind"]
tags: ["init", "update", "bind"]
tags: ["init", "bind"]
tags: ["init", "bind"]
- name: Set blocky
ansible.builtin.include_role:
+2 -2
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+2 -2
View File
@@ -30,9 +30,9 @@
tags: ["always"]
tasks:
# init
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
-67
View File
@@ -43,17 +43,6 @@
listen: "notification_restart_immich-ml"
ignore_errors: true # noqa: ignore-errors
- name: Restart actual-budget
ansible.builtin.systemd:
name: "actual-budget.service"
state: "restarted"
enabled: true
scope: "user"
daemon_reload: true
changed_when: false
listen: "notification_restart_actual-budget"
ignore_errors: true # noqa: ignore-errors
- name: Restart paperless
ansible.builtin.systemd:
name: "paperless.service"
@@ -65,29 +54,6 @@
listen: "notification_restart_paperless"
ignore_errors: true # noqa: ignore-errors
- name: Restart vikunja
ansible.builtin.systemd:
name: "vikunja.service"
state: "restarted"
enabled: true
scope: "user"
daemon_reload: true
changed_when: false
listen: "notification_restart_vikunja"
ignore_errors: true # noqa: ignore-errors
- name: Restart opencloud
ansible.builtin.systemd:
name: "opencloud.service"
state: "restarted"
enabled: true
daemon_reload: true
scope: "user"
when: is_opencloud_init.stat.exists
changed_when: false
listen: "notification_restart_opencloud"
ignore_errors: true # noqa: ignore-errors
- name: Restart affine
ansible.builtin.systemd:
name: "affine.service"
@@ -123,17 +89,6 @@
listen: "notification_restart_collabora"
ignore_errors: true # noqa: ignore-errors
- name: Restart ezbookkeeping
ansible.builtin.systemd:
name: "ezbookkeeping.service"
state: "restarted"
enabled: true
scope: "user"
daemon_reload: true
changed_when: false
listen: "notification_restart_ezbookkeeping"
ignore_errors: true # noqa: ignore-errors
- name: Restart sure
ansible.builtin.systemd:
name: "{{ item }}"
@@ -147,25 +102,3 @@
changed_when: false
listen: "notification_restart_sure"
ignore_errors: true # noqa: ignore-errors
- name: Restart wikijs
ansible.builtin.systemd:
name: "wikijs.service"
state: "restarted"
enabled: true
scope: "user"
daemon_reload: true
changed_when: false
listen: "notification_restart_wikijs"
ignore_errors: true # noqa: ignore-errors
- name: Restart trilium
ansible.builtin.systemd:
name: "trilium.service"
state: "restarted"
enabled: true
scope: "user"
daemon_reload: true
changed_when: false
listen: "notification_restart_trilium"
ignore_errors: true # noqa: ignore-errors
@@ -161,3 +161,38 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/manticoresearch/manticore:{{ version['containers']['manticore'] }}"
file: "docker.io_manticoresearch_manticore_{{ version['containers']['manticore'] }}"
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/toeverything/affine:{{ version['containers']['affine'] }}"
file: "ghcr.io_toeverything_affine_{{ version['containers']['affine'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -15,3 +15,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/collabora/code:{{ version['containers']['collabora'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -49,3 +49,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/gitea/gitea:{{ version['containers']['gitea'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -118,3 +118,38 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/immich-app/immich-machine-learning:{{ version['containers']['immich'] }}-openvino"
file: "ghcr.io_immich-app_immich-machine-learning_{{ version['containers']['immich'] }}-openvino"
- image: "ghcr.io/immich-app/immich-server:{{ version['containers']['immich'] }}"
file: "ghcr.io_immich-app_immich-server_{{ version['containers']['immich'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -174,3 +174,36 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "docker.io/library/nextcloud:{{ version['containers']['nextcloud'] }}"
file: "docker.io_library_nextcloud_{{ version['containers']['nextcloud'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -57,8 +57,16 @@
- "data/containers/paperless/consume"
- "containers/paperless"
- "containers/paperless/ssl"
- "containers/paperless/build"
become: true
- name: Deploy containerfile for build
ansible.builtin.template:
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/paperless/build/paperless.containerfile.j2"
dest: "{{ node['home_path'] }}/containers/paperless/build/Containerfile"
owner: "{{ ansible_user }}"
group: "svadmins"
mode: "0640"
- name: Deploy root certificate
ansible.builtin.copy:
@@ -72,6 +80,18 @@
notify: "notification_restart_paperless"
no_log: true
- name: Build paperless container image
containers.podman.podman_image:
name: "{{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx"
# check tags from container file
tag: "{{ version['containers']['paperless'] }}"
state: "build"
path: "{{ node['home_path'] }}/containers/paperless/build"
- name: Prune paperless dangling images
containers.podman.podman_prune:
image: true
- name: Register secret value to podman secret
containers.podman.podman_secret:
name: "{{ item.name }}"
@@ -122,3 +142,36 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "{{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx:{{ version['containers']['paperless'] }}"
file: "{{ domain['internal'] }}_{{ node['name'] }}_paperless-ngx_{{ version['containers']['paperless'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
+35 -2
View File
@@ -63,7 +63,7 @@
content: |
{{ hostvars['console']['ca']['root']['crt'] }}
dest: "{{ node['home_path'] }}/containers/sure/ssl/{{ root_cert_filename }}"
owner: "{{ services['paperless']['subuid'] }}"
owner: "{{ services['sure']['subuid'] }}"
group: "svadmins"
mode: "0440"
become: true
@@ -98,7 +98,7 @@
- "sure-worker.container"
notify: "notification_restart_sure"
- name: Enable paperless.service
- name: Enable sure.service
ansible.builtin.systemd:
name: "{{ item }}"
state: "started"
@@ -108,3 +108,36 @@
loop:
- "sure-web.service"
- "sure-worker.service"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/we-promise/sure:{{ version['containers']['sure'] }}"
file: "ghcr.io_we-promise_sure_{{ version['containers']['sure'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -55,3 +55,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/vaultwarden/server:{{ version['containers']['vaultwarden'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -76,3 +76,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/authelia/authelia:{{ version['containers']['authelia'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -74,3 +74,10 @@
enabled: true
daemon_reload: true
become: true
- name: Fetch deb bin file
ansible.builtin.fetch:
src: "/var/cache/apt/archives/alloy-{{ version['packages']['alloy'] }}.deb"
dest: "{{ hostvars['console']['node']['data_path'] }}/bin/"
flat: true
become: true
@@ -97,3 +97,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ domain['internal'] }}/{{ node['name'] }}/caddy:{{ version['containers']['caddy'] }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -24,6 +24,17 @@
mode: "0770"
when: node['name'] == "app"
- name: Create container image archive directory
ansible.builtin.file:
path: "{{ item }}"
owner: "{{ ansible_user }}"
group: "svadmins"
state: "directory"
mode: "0700"
loop:
- "{{ node['home_path'] }}/archives"
- "{{ node['home_path'] }}/archives/containers"
- name: Install podman and reset ssh connection for initiating
when: is_podman_installed.rc != 0
become: true
-7
View File
@@ -1,8 +1 @@
---
- name: Register font
ansible.builtin.shell: |
fc-cache -f -v
become: true
changed_when: false
listen: "notification_update_font"
ignore_errors: true # noqa: ignore-errors
+11 -21
View File
@@ -41,7 +41,7 @@
ansible.builtin.get_url:
url: "https://github.com/0xERR0R/blocky/releases/download/v{{ version['packages']['blocky'] }}/\
blocky_v{{ version['packages']['blocky'] }}_Linux_x86_64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-x86_64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
owner: "blocky"
group: "blocky"
mode: "0600"
@@ -52,16 +52,16 @@
ansible.builtin.get_url:
url: "https://github.com/0xERR0R/blocky/releases/download/v{{ version['packages']['blocky'] }}/\
blocky_v{{ version['packages']['blocky'] }}_Linux_arm64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-arm64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
owner: "blocky"
group: "blocky"
mode: "0600"
become: true
when: ansible_facts['architecture'] == "aarch64"
- name: Deploy blocky binary file (x86_64)
- name: Deploy blocky binary file
ansible.builtin.unarchive:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-x86_64.tar.gz"
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
remote_src: true
dest: "/usr/local/bin/"
owner: "root"
@@ -72,23 +72,6 @@
- "--wildcards"
- "blocky"
become: true
when: ansible_facts['architecture'] == "x86_64"
notify: "notification_restart_blocky"
- name: Deploy blocky binary file (aarch64)
ansible.builtin.unarchive:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-arm64.tar.gz"
remote_src: true
dest: "/usr/local/bin/"
owner: "root"
group: "root"
mode: "0755"
extra_opts:
- "--strip-components=0"
- "--wildcards"
- "blocky"
become: true
when: ansible_facts['architecture'] == "aarch64"
notify: "notification_restart_blocky"
- name: Deploy blocky config
@@ -141,3 +124,10 @@
enabled: true
daemon_reload: true
become: true
- name: Fetch deb bin file
ansible.builtin.fetch:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
dest: "{{ hostvars['console']['node']['data_path'] }}/bin/"
flat: true
become: true
@@ -78,3 +78,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/smallstep/step-ca:{{ version['containers']['step'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -83,3 +83,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/grafana/grafana:{{ version['containers']['grafana'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -75,7 +75,7 @@
rm: true
detach: false
env:
TZ: "Asia/Seoul"
TZ: "{{ timezone }}"
LLDAP_LDAP_BASE_DN: "{{ domain['dc'] }}"
secrets:
- "LLDAP_DATABASE_URL,type=env"
@@ -108,3 +108,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/lldap/lldap:{{ version['containers']['ldap'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -64,3 +64,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/grafana/loki:{{ version['containers']['loki'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -9,12 +9,9 @@
- "gitea"
- "immich"
- "paperless"
- "vikunja"
- "affine"
- "nextcloud"
- "ezbookkeeping"
- "sure"
- "wikijs"
- name: Create postgresql directory
ansible.builtin.file:
@@ -175,3 +172,29 @@
daemon_reload: true
scope: "user"
loop: "{{ connected_services }}"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ domain['internal'] }}/{{ node['name'] }}/postgres:pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}"
dest: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -68,3 +68,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/prom/prometheus:{{ version['containers']['prometheus'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -68,3 +68,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/enix/x509-certificate-exporter:{{ version['containers']['x509-exporter'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
+7 -3
View File
@@ -19,7 +19,11 @@ log() {
local timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
local level="$1"
local msg="$2"
echo "time=\"$timestamp\" level=\"$level\" msg=\"$msg\" source=\"edit_secret.sh\"">&2
if [ "$level" == "error" ]; then
echo "time=\"$timestamp\" level=\"$level\" msg=\"$msg\" source=\"edit_secret.sh\"">&2
else
echo "time=\"$timestamp\" level=\"$level\" msg=\"$msg\" source=\"edit_secret.sh\"">&1
fi
}
# Secret file check
@@ -58,9 +62,9 @@ cleanup() {
trap cleanup EXIT
# Get GPG password from prompt
echo -n "Enter GPG passphrase: " >&2
echo -n "Enter GPG passphrase: " >&1
read -s GPG_PASSPHRASE
echo "" >&2
echo "" >&1
# Decrypt age-key on the tmpfs (memory)
echo "$GPG_PASSPHRASE" | gpg --batch --yes --passphrase-fd 0 \
+2 -54
View File
@@ -117,12 +117,9 @@ postgresql:
gitea: ENC[AES256_GCM,data:l+pBCzyQa3000SE9z1R4htD0V0ONsBtKy92dfgsVYsZ3XlEyVJDIBOsugwM=,iv:5t/oHW1vFAmV/s2Ze/cV9Vuqo96Qu6QvZeRbio7VX2s=,tag:4zeQaXiXIzBpy+tXsxmN7Q==,type:str]
immich: ENC[AES256_GCM,data:11jvxTKA/RL0DGL6y2/X092hnDohj6yTrYGK4IVojqBd1gCOBnDvUjgmx14=,iv:oBfHxsx9nxhyKY/WOuWfybxEX2bf+lHEtsaifFRS9lg=,tag:tAfkBdgQ8ZEkLIFcDICKDw==,type:str]
paperless: ENC[AES256_GCM,data:6VBrBbjVoam7SkZCSvoBTdrfkUoDghdGTiBmFLul04X/okXOHeC5zusJffY=,iv:iZumcJ3TWwZD77FzYx8THwCqC+EbnXUBrEKuPh3zgV8=,tag:u2m8SppAdxZ/duNdpuS3oQ==,type:str]
vikunja: ENC[AES256_GCM,data:/+wQdoFPTBG2elI9kZbAVWrHZ0DhMaYr4dc+2z9QNdb3TcDS2PEia0JuSAg=,iv:MViZTyUD8YqMmxSTWCQpJ30f/KQdQGOzPlRHHsQ8lAw=,tag:zov3POno139dkMxFDpj2gg==,type:str]
affine: ENC[AES256_GCM,data:XPXrcszsV06YqCJZ7CDqc4rCwqqNlbtLCFYfLAQ8jamLtft8L2UVrMA4WZo=,iv:vrWdBeckxB9tmEE628j4jhU+hSpE6TXYMGt0hh1Cg84=,tag:hlWwWUGht8NqWTZREMsa1Q==,type:str]
nextcloud: ENC[AES256_GCM,data:ROsximNuWYMTZktmLJPx7W1Qol/uT+APgwoCtFO/6ZYYc3KxKvlk344eqEc=,iv:4d+MrfIHjJKAcwhvZ3g4go66uZcieuL7lngKErJd+fg=,tag:QbWOtxeCbiu62GyrE2atXg==,type:str]
ezbookkeeping: ENC[AES256_GCM,data:CYYQ5DVr8Na46QduvUNF6d0XBVSXTml34q3/PhIYIvUNviOVgCjqXA4wN7g=,iv:qRljohJ+wI50XxSgMElKp65HyV3mKRTqDGjw9C1S0d0=,tag:PClp7PRmC0+PV0SzZpJqqQ==,type:str]
sure: ENC[AES256_GCM,data:FULJ2gjJ2gZC3s324itW+CjGRBHIP9RnOqw5TT1UaiUhb7UHAPm1na+LsZk=,iv:c0GnVZkxprJUzPPq3TCQaZvAes9QQuvDXqgVLLaiQIg=,tag:uDxy/Lkd2hNK4AWwMNMslw==,type:str]
wikijs: ENC[AES256_GCM,data:2drkkTevrcUrgxOHavIEPcemc2l5+/3GEAYNCYVL/63daVda5tzL61tPm2A=,iv:87qPrlRaosXO75eaxo4xjevVc1Pt9MiHv6lYFBB3MKU=,tag:SnVbVR4ZM0qvVmWpcgSKrg==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
@@ -213,14 +210,6 @@ immich:
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:bzMt0Ox0Za4dOhoo7S6dYCdK32JI9Q==,iv:PRTryIJk0tR545XY0LoHwklvsJp5+A5bEljNmzUvRhY=,tag:EVsjRUGMOadaNbMu0Xr4XA==,type:comment]
actualbudget:
oidc:
secret: ENC[AES256_GCM,data:TE2umZ9Vvr7cSfA2+TAfRadIWZN3hyOKQ6U9NqJFm5e9iiw1avI+QlnYcKI=,iv:rUWoclBRqh0tsGnMq29395Fn2NP7AXnSCd0s+S8jQ6I=,tag:qPX/TcdIo6BJeex7wmi02Q==,type:str]
hash: ENC[AES256_GCM,data:UjhNkGj+sxbnmPUx1V5kVYwZnzsB0aEvN8YV29lcvMbSnf9xpQWwD5C93Zu8SYrnS/p88qZpGBgAjr9Pcly3y0H1YMRt9zzbHZU3Uo0DPDrSWRQdeB/8LkcM/cwMAs8arS6PO03ECNnN5Z6aTmFdFnLjUkvUuSWMFscItAzMzhWCpeY=,iv:B06LI7Cq3NN8haOLfN3gWIpUFnvdUlq6D2XmARojDpk=,tag:MflE8qcY5j/aAA7xfPCqng==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:McPUAbIUvtC1gdPaxTgAxAMCMWcLfg==,iv:Tp6idRf7he3sYzo8LW596C905JAaoTIhIoDUzSyRT0k=,tag:4mZQ0Swu1X9uuwjsRNhr2A==,type:comment]
paperless:
session_secret: ENC[AES256_GCM,data:siwCs2noeVpg9DCEZybnmo/oz11BdrHSTnHciMOu/6g=,iv:XVjhu10TIujIdUopN9+TVVqRade9EvItDWxym6YXnZs=,tag:TxLYm+4Bo7IMaTQBtMg9pQ==,type:str]
@@ -232,22 +221,6 @@ paperless:
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:V7DJHA2JQirfBsrCGhXrhg==,iv:+jYqX9hGNnuyYj9o9LpCYFVOoD6nSrtc4t40Ag0mMzo=,tag:1wSxKtkJm42reUxdwYDvlg==,type:comment]
vikunja:
session_secret: ENC[AES256_GCM,data:CMyw8JGHyTczGsrOJJwQBKfXMU4Sudvwkur1Lgx4o64=,iv:F2VmpqddiDT4jGaGDKGl6FARsQOt3lLz3X6TjC2MIVU=,tag:UJYyzrl/FX1BNwY4ROFncA==,type:str]
oidc:
secret: ENC[AES256_GCM,data:QwqndYsfr+fh9OLkHYtLYCa6WUdhnL7A4btz1d1eelTwq3Kps5S6BUN5qZg=,iv:51N8byIAAUh4ky7YBAuEJOBEWu1d9AX5W1m37/cLlCM=,tag:GD7jbxNGd748TCPgqsxyMg==,type:str]
hash: ENC[AES256_GCM,data:ORifyT4u1V2CyBCNBgF72wwS2i05mlzA4iIVEa1cH9aaE69PdiQvGGzMHK+tmlfpVaVQEENSt1QDUSSlMyeuZT/3a0JwAvlz+XDbpS7bicL2cB6DCa4JyEd/rbGRXs0/COfxPxXzYv7jq9gd2uSJ+cCGYb/93WuEXSEI6PHi+FF7N94=,iv:FVSGySa4YB2vwenqSagBzxeIexg91ewvcQMix+etmng=,tag:yyQtOgzOZypba+rV3A1K9g==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:EsRGZP7snPchEAMoQN5PoQpiOA==,iv:A/8POGq3pIw7aX5S2vyKtI2vPqH0FT6yZnpe/vVbifw=,tag:BgUYHX2zxIL7yLS0JbI1Yg==,type:comment]
opencloud:
admin:
password: ENC[AES256_GCM,data:VKG7sNTTLHCXRGf4SAlR91+hvc7PaNrnpJX/4kItVcT9W1Hdl/yKgHHD7M8=,iv:WwWnx9KuN+i/Ugwv+HY4IGDZrLHk71hsobGFOn9kml0=,tag:SS6ihrtZjLnlAJR59lw+gw==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:k55osvepVeB1RC5hZ4IF,iv:AlhfmWwn/DiSESWc+ULJSOLUhnrKAIfWr7MeiwV8qc8=,tag:hOgptwUcY6nVxPIhu+DYgw==,type:comment]
affine:
secret_key: ENC[AES256_GCM,data:LLX78DpYnha1JWhgw0sHLzIVq/oIzvT+nB7zgli4mroGbnt7WZaXCx34zKkYRwYj/+0L4IFFVdkzKtK5DO84SgFkS2Bk2iNdCMqIx80CpyiD8IWAcyRu5d6hh82PlgyxU80T/4nbLbIn0GLubPTTeUX8GC3VxRU=,iv:DnmvbhlygSHes0jAkIm4+WXMUQLzr4R4dNa33rO67v8=,tag:+2wlh+/ekiTyShWM4XBbUw==,type:str]
@@ -269,14 +242,6 @@ nextcloud:
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:tMahvC9OLW4+AGLyx68SNsOPBezApw==,iv:WHx8ruuQ33J/8XtwyhvDy2cKqE7lAWvj/r5AUhdyssU=,tag:uRwheXUxqNSIhcPqGeMNog==,type:comment]
ezbookkeeping:
oidc:
secret: ENC[AES256_GCM,data:ZMIfRwXDT1ujGKoc7DGvc8/O+ciB+kajo9yOwVsMsbEjl6D8gl6I0Lbsta8=,iv:++p1TTW6gDUEvh56SjMgldrpob/VWNtiYGo6wNS8cz0=,tag:LQaW333UskiN4mtIjUAguA==,type:str]
hash: ENC[AES256_GCM,data:XyB1N3MUzBHWHAumat7/ASy/Aja/gLKmeTriOqLnMgZ9lBE1birYtFW+R0wZ+vyx79tHKVnRxzrWsxoD5jitCmHyMVrJmJKl5c4SYMhytKfBPgrNe3twcc06U+wONmgAuVpaEQlnnyzAz42SpOHbT55GegHjYzT5hXax8eRvdM6xJSY=,iv:R4+EdQuKo2JumY3cu8KPpeFezcLhlehXBxr2wVG5wHk=,tag:hpDX1x9NCCutUsnDKEf1Sg==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:Fsqc2JDp9dvfgiCjdQ==,iv:3DALKKEXaP8hzXRvxD4CgfFpOiPPsOa16OB94n8WKp8=,tag:K+FF3zGrc0YLXWK/R2L3Ow==,type:comment]
sure:
session_secret: ENC[AES256_GCM,data:InHsz/jld8E9TwI8MWpxk9x2I7dxlIsY9R6jtDK2pBA=,iv:HY5yXEC2Dce26e9/vXTIWELvVd9ZjhcCwFD0jhz5pPw=,tag:LLSJovZ0RH3CUK+se7R4Ag==,type:str]
@@ -286,23 +251,6 @@ sure:
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:NkvAsD10P7qUvGPXeTY+rQ==,iv:GjsUk3Ht6RYW/rhkRhMSFEmtsAiS+dK7niYDJVBj2iE=,tag:8KnDcuRTm7P76Kh2hmWeXw==,type:comment]
wikijs:
il: ENC[AES256_GCM,data:gsAEHk4MI75EXIiqdb05RYSmlxaQ7mlYXTwTYYVJ20KC397T6xbHzvNojlI=,iv:iYc+BahiJ50LSr35/T1VCQsxsRen5rKLwQhfVQMkdz4=,tag:rscWcLWyTaSR4KEPJaes2A==,type:str]
oidc:
secret: ENC[AES256_GCM,data:+bmvyUkiQ+vnaJW7wgjohv4wdvliqx8whdSM8iBUJXGFy/QOs2oJm4FoUcA=,iv:U07y/+87zbXQ2hQ4HvzKcEH5nQsaSIF1Oh3yv6/ytWU=,tag:knGwjGhH5D/OSvW6j5S0VQ==,type:str]
hash: ENC[AES256_GCM,data:7jKBt9mdfxKDU6vBIP6k/wj0gIsRnLwwSrLOlnbbiNZVmbZXqv/UxEsLxCyx1rP2mzGgaxNCBh6WOo7mbSMPezMiuf/enrNrmIwpcP2R0H6LxGTiLFk/7EZ493oy7qFmmsM2qM7Y6qhhKUygD4XbJfVZ2sdojjIGAWy6XdpbbQICb5I=,iv:N3gPga+iDYUF0uAx671DP+4c7FYUKP12MEbYmKZRPAI=,tag:7tKwhxk5yQ0KfZrg0+v/rw==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:rf52AKZDCNq9PVnAMnDXzw==,iv:+rT8sgcAz0LoeUcPgIrpSw/JWvk5agunnTkaWac16kU=,tag:SCyTu1rUNnmS2EFMeIvlCw==,type:comment]
trilium:
oidc:
secret: ENC[AES256_GCM,data:EfKdxk/OBgQyGVwOnxMFS/HhucL5qicaB7HfWu4yNvmrqxU+ubkT62zJewQ=,iv:Ye4gNbyOuEaujGfxXYKg4GWDOP+cnTNL230t8B98WUY=,tag:B1YoabR7y8OVUKYj/aiSPA==,type:str]
hash: ENC[AES256_GCM,data:QyU+leT28FY3nW+tIbnap2n52xw1bcb77ziFf6cW9gdwwhL6rJCEaTGQritpVsCH5C9ytxlV0Acn7dJbnYSHFtZ2jbuvYMSQR4ewtY+tFX1MdD9+FmtH8umb7PHbG6upXgrXRNRIglJ4U1BEfg0xkdzEPbJq+r13A1+cKESrewayae4=,iv:CUE6YjDzgoc017e8+dT1S956PwmOlb7h6dhnOpCr3iw=,tag:XGgpzuVZXJ8Axb4ib8anVQ==,type:str]
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
#
#
#ENC[AES256_GCM,data:T4Wtn49AAxPd2QUFTR+q,iv:bH5goGWBDqumAat9dUv2OwfCUJUpuVqncTMqMBZUXhI=,tag:G+W6hHA+yftQ+4RJpXrxHg==,type:comment]
switch:
password: ENC[AES256_GCM,data:qu0f9L7A0eFq/UCpaRs=,iv:W8LLOp3MSfd/+EfNEZNf91K8GgI5eUfVPoWTRES2C0Y=,tag:Q5FlAOfwqwJwPvd7k6i+0g==,type:str]
@@ -332,7 +280,7 @@ sops:
UmliaFNxVTBqRkI1QWJpWGpTRWxETW8KEY/8AfU73UOzCGhny1cNnd5dCNv7bHXt
k+uyWPPi+enFkVaceSwMFrA66uaWWrwAj11sXEB7yzvGFPrnAGezjQ==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2026-05-09T12:29:30Z"
mac: ENC[AES256_GCM,data:ql3rWwdwJRn2nH0SLnjTaJK4NVemxG8T814VEDaHv38bc7A3aaMGuZ92mHY4z+5oNA+DpR/UjkGJ/NrckbURxY63BEcyVCsS4Rb95HTKjDOjf2g5rrohdgI3ZUE1jvlyf3tAh2ZYh1J8QddLKyLju/J43KcB+XRQKhJv4kubAQ0=,iv:4inRbBMuhB7Hzi8fGpqyC3juUqteZGLXX0GtnHusF7Y=,tag:ZxJ6iv8NxJr4rvCInml8dg==,type:str]
lastmodified: "2026-05-09T14:26:51Z"
mac: ENC[AES256_GCM,data:TYs08ZSS2kcO5lYuhQ/IySUSQ3DpL+ba3/uNLyszht4OttR110/W/WQLiRuu/Ql6FwtDtjq6I3iNpOhmCHSv1kMCam1l99GEIYCaPUIY+TY3Zw0j7518dFXe8p/DrKRwIVXfK5lIKLIEd+eizD50HzwXXJFmU+7YDkQ1Dx+55kw=,iv:arJKJ4wO4sdQlu3GZbtultsfM6s8vbhG93tnf2EjJDc=,tag:m95gUqvn4w85XI8qVvCZpQ==,type:str]
unencrypted_suffix: _unencrypted
version: 3.12.1
@@ -19,7 +19,7 @@ Volume=%h/containers/affine/config:/root/.affine/config
Volume=%h/containers/affine/ssl:/etc/ssl/affine:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
## OIDC callback URIs
Environment="AFFINE_SERVER_HOST={{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
Environment="AFFINE_SERVER_EXTERNAL_URL=https://{{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
@@ -11,7 +11,7 @@ HostName=collabora
PublishPort={{ services['collabora']['ports']['http'] }}:9980/tcp
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="aliasgroup1=https://{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}"
# Environment="aliasgroup2=other_server_FQDN"
Environment="extra_params=--o:ssl.enable=false --o:ssl.termination=true --o:server_name={{ services['collabora']['domain']['public'] }}.{{ domain['public'] }} --o:admin_console.enable=false"
@@ -19,7 +19,7 @@ Volume=%h/data/containers/gitea:/data:rw
Volume=%h/containers/gitea/ssl:/etc/ssl/gitea:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="GITEA__server__DISABLE_SSH=true"
# Database
Environment="GITEA__database__DB_TYPE=postgres"
@@ -21,7 +21,7 @@ PodmanArgs=--group-add keep-groups
Volume=%h/containers/immich/ml/cache:/cache:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -24,7 +24,7 @@ Volume=%h/data/containers/immich:/data:rw
Volume=%h/containers/immich/ssl:/etc/ssl/immich:ro
# Environment
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# The new environment from version 2.7.0 to enable CSP
Environment="IMMICH_HELMET_FILE=true"
@@ -14,7 +14,7 @@ PublishPort={{ services[manticore_service]['ports']['manticore'] }}:9308
Volume=%h/data/containers/manticore/{{ manticore_service }}:/var/lib/manticore:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -17,7 +17,7 @@ Volume=%h/containers/nextcloud/ini/upload.ini:/usr/local/etc/php/conf.d/upload.i
Volume=%h/data/containers/nextcloud/html:/var/www/html:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# PostgreSQL
Environment="PGSSLMODE=verify-full"
Environment="PGSSLROOTCERT=/etc/ssl/nextcloud/{{ root_cert_filename }}"
@@ -0,0 +1,13 @@
FROM ghcr.io/paperless-ngx/paperless-ngx:{{ version['containers']['paperless'] }}
USER root
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl ca-certificates \
&& curl -fsSL https://raw.githubusercontent.com/tesseract-ocr/tessdata_best/main/kor.traineddata \
-o /usr/share/tesseract-ocr/5/tessdata/kor.traineddata \
&& curl -fsSL https://raw.githubusercontent.com/tesseract-ocr/tessdata_best/main/eng.traineddata \
-o /usr/share/tesseract-ocr/5/tessdata/eng.traineddata \
&& rm -rf /var/lib/apt/lists/*
USER paperless
@@ -8,7 +8,7 @@ After=redis_paperless.service
Wants=redis_paperless.service
[Container]
Image=ghcr.io/paperless-ngx/paperless-ngx:{{ version['containers']['paperless'] }}
Image={{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx:{{ version['containers']['paperless'] }}
ContainerName=paperless
HostName=paperless
PublishPort={{ services['paperless']['ports']['http'] }}:8000/tcp
@@ -20,8 +20,8 @@ Volume=%h/data/containers/paperless/consume:/usr/src/paperless/consume:rw
Volume=%h/containers/paperless/ssl:/etc/ssl/paperless:ro
# General
Environment="TZ=Asia/Seoul"
Environment="PAPERLESS_TIME_ZONE=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="PAPERLESS_TIME_ZONE={{ timezone }}"
Environment="PAPERLESS_URL=https://{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }}"
Environment="PAPERLESS_OCR_LANGUAGE=kor+eng"
Environment="PAPERLESS_OCR_LANGUAGES=kor"
@@ -20,7 +20,7 @@ Volume=%h/containers/redis/{{ redis_service }}/redis.conf:/usr/local/etc/redis/r
Exec=redis-server /usr/local/etc/redis/redis.conf
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -18,7 +18,7 @@ Volume=%h/data/containers/sure/storage:/rails/storage:rw
Volume=%h/containers/sure/ssl:/etc/ssl/sure:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="SELF_HOSTED=true"
Environment="ONBOARDING_STATE=closed"
Environment="RAILS_FORCE_SSL=false"
@@ -18,7 +18,7 @@ Volume=%h/containers/sure/ssl:/etc/ssl/sure:ro
Exec=bundle exec sidekiq
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="SELF_HOSTED=true"
Environment="ONBOARDING_STATE=closed"
Environment="RAILS_FORCE_SSL=false"
@@ -18,7 +18,7 @@ PublishPort={{ services['vaultwarden']['ports']['http'] }}:80/tcp
Volume=%h/data/containers/vaultwarden:/data:rw
Volume=%h/containers/vaultwarden/ssl:/etc/ssl/vaultwarden:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="DOMAIN=https://{{ services['vaultwarden']['domain']['public'] }}.{{ domain['public'] }}"
Environment="SIGNUPS_ALLOWED=false"
Secret=VW_ADMIN_TOKEN,type=env,target=ADMIN_TOKEN
@@ -22,7 +22,7 @@ Volume=%h/containers/authelia/config:/config:rw
Volume=%h/containers/authelia/certs:/etc/ssl/authelia:ro
# Default
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Enable Go template engine
# !CAUTION!
{% raw %}# If this environment were enabled, you would have to use {{/* ... /*}} for {{ go_filter }} options. Go engine always processes its own grammar first.
@@ -93,25 +93,6 @@ notifier:
identity_providers:
oidc:
hmac_secret: '' # $AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE
claims_policies:
# trilium expects name/email value in id token, but authelia doesn't send it basically
trilium:
id_token:
- email
- email_verified
- preferred_username
- name
# For the app which doesn't use secret.
cors:
endpoints:
- 'authorization'
- 'token'
- 'revocation'
- 'introspection'
- 'userinfo'
allowed_origins:
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}'
allowed_origins_from_client_redirect_uris: true
jwks:{% raw %}
- algorithm: 'RS256'
use: 'sig'
@@ -192,28 +173,6 @@ identity_providers:
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_post'
# https://www.authelia.com/integration/openid-connect/clients/actual-budget/
- client_id: 'actual-budget'
client_name: 'Actual Budget'
client_secret: '{{ hostvars['console']['actualbudget']['oidc']['hash'] }}'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}/openid/callback'
scopes:
- 'openid'
- 'profile'
- 'groups'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
# https://www.authelia.com/integration/openid-connect/clients/paperless/
- client_id: 'paperless'
client_name: 'Paperless'
@@ -236,122 +195,6 @@ identity_providers:
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_post'
# https://www.authelia.com/integration/openid-connect/clients/vikunja/
- client_id: 'vikunja'
client_name: 'Vikunja'
client_secret: '{{ hostvars['console']['vikunja']['oidc']['hash'] }}'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }}/auth/openid/authelia'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
# OpenCloud configuration
## https://docs.opencloud.eu/docs/admin/configuration/authentication-and-user-management/external-idp/
## Web
- client_id: 'opencloud'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/'
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/oidc-callback.html'
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/oidc-silent-redirect.html'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## desktop
- client_id: 'OpenCloudDesktop'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'http://localhost'
- 'http://127.0.0.1'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## Android
- client_id: 'OpenCloudAndroid'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'oc://android.opencloud.eu'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## IOS
- client_id: 'OpenCloudIOS'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'oc://ios.opencloud.eu'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
# https://docs.affine.pro/self-host-affine/administer/oauth-2-0
- client_id: 'affine'
client_name: 'Affine'
@@ -395,27 +238,6 @@ identity_providers:
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_post'
# https://www.authelia.com/integration/openid-connect/clients/ezbookkeeping/
- client_id: 'ezbookkeeping'
client_name: 'ezBookkeeping'
client_secret: '{{ hostvars['console']['ezbookkeeping']['oidc']['hash'] }}'
public: false
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'https://{{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }}/oauth2/callback'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
# https://www.authelia.com/integration/openid-connect/clients/sure/
- client_id: 'sure'
client_name: 'Sure'
@@ -438,49 +260,3 @@ identity_providers:
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
# https://www.authelia.com/integration/openid-connect/clients/wikijs/
- client_id: 'wikijs'
client_name: 'Wiki'
client_secret: '{{ hostvars['console']['wikijs']['oidc']['hash'] }}'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
# add Callback URL / Redirect URI HERE
- 'https://{{ services['wikijs']['domain']['public'] }}.{{ domain['public'] }}/login/aa72242e-7058-4cfa-9504-19a4208062ea/callback' # Note this must be copied during step 7 of the Application configuration.
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_post'
# https://www.authelia.com/integration/openid-connect/clients/trillium/
# The name is trilium, not trillium
- client_id: 'trilium'
client_name: 'Trilium Notes'
client_secret: '{{ hostvars['console']['trilium']['oidc']['hash'] }}'
public: false
authorization_policy: 'one_factor'
# claims policy above
claims_policy: 'trilium'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://{{ services['trilium']['domain']['public'] }}.{{ domain['public'] }}/callback'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
@@ -33,7 +33,7 @@ Volume=%h/containers/caddy/data:/data:rw
Volume=/var/log/caddy:/log:rw
{% endif %}
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Secret=CADDY_ACME_KEY,target=/run/secrets/CADDY_ACME_KEY
{% if node['name'] == 'auth' %}
@@ -47,30 +47,12 @@
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['actualbudget']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['actualbudget']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['paperless']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['paperless']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['vikunja']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['vikunja']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['opencloud']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['opencloud']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['affine']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['affine']['ports']['http'] }} {
@@ -89,27 +71,9 @@
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['ezbookkeeping']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['ezbookkeeping']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['sure']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['sure']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['wikijs']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['wikijs']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
{{ services['trilium']['domain']['internal'] }}.{{ domain['internal'] }} {
import private_tls
reverse_proxy host.containers.internal:{{ services['trilium']['ports']['http'] }} {
header_up Host {http.request.header.X-Forwarded-Host}
}
}
@@ -91,15 +91,6 @@
}
}
}
{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{ services['actualbudget']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
@@ -109,24 +100,6 @@
}
}
}
{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{ services['vikunja']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{ services['opencloud']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
{{ services['affine']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
@@ -154,15 +127,6 @@
}
}
}
{{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{services['ezbookkeeping']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
{{ services['sure']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
@@ -172,24 +136,6 @@
}
}
}
{{ services['wikijs']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{services['wikijs']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
{{ services['trilium']['domain']['public'] }}.{{ domain['public'] }} {
import crowdsec_log
route {
crowdsec
reverse_proxy https://{{services['trilium']['domain']['internal'] }}.{{ domain['internal'] }} {
header_up Host {http.reverse_proxy.upstream.host}
}
}
}
# Internal domain
{{ node['name'] }}.{{ domain['internal'] }} {
@@ -21,7 +21,7 @@ Volume=%h/containers/ca/config:/home/step/config:rw
Volume=%h/containers/ca/db:/home/step/db:rw
Volume=%h/containers/ca/templates:/home/step/templates:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Since 0.30.0, Docker CMD no longer expands PWDPATH.
#Environment="PWDPATH=/run/secrets/STEP_CA_PASSWORD"
@@ -24,7 +24,7 @@ Volume=%h/containers/grafana/data:/var/lib/grafana:rw
Volume=%h/containers/grafana/etc:/etc/grafana:ro
Volume=%h/containers/grafana/ssl:/etc/ssl/grafana:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="GF_PATHS_CONFIG=/etc/grafana/grafana.ini"
# plugin
# Environment="GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource"
@@ -24,7 +24,7 @@ Volume=%h/containers/ldap/data:/data:rw
Volume=%h/containers/ldap/ssl:/etc/ssl/ldap:ro
# Default
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Domain
Environment="LLDAP_LDAP_BASE_DN={{ domain['dc'] }}"
@@ -19,7 +19,7 @@ Volume=%h/containers/loki/data:/loki:rw
Volume=%h/containers/loki/etc:/etc/loki:ro
Volume=%h/containers/loki/ssl:/etc/ssl/loki:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Exec=--config.file=/etc/loki/loki.yaml
@@ -21,7 +21,7 @@ Volume=%h/containers/postgresql/ssl:/etc/ssl/postgresql:ro
Volume=%h/containers/postgresql/init:/docker-entrypoint-initdb.d/:ro
Volume=%h/containers/postgresql/backups:/backups:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# This option is only for init process, after init custom config file `pg_hba.conf` will control this option.
Environment="POSTGRES_HOST_AUTH_METHOD=trust"
@@ -19,7 +19,7 @@ Volume=%h/containers/prometheus/data:/prometheus:rw
Volume=%h/containers/prometheus/etc:/etc/prometheus:ro
Volume=%h/containers/prometheus/ssl:/etc/ssl/prometheus:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Exec=--config.file=/etc/prometheus/prometheus.yaml \
--web.config.file=/etc/prometheus/web-config.yaml \
@@ -12,12 +12,10 @@ whitelist:
- "{{ hostvars['fw']['network6']['console']['wg'] }}"
{% if node['name'] == 'auth' %}
expression:
# budget local-first sql scrap rule
- "evt.Meta.target_fqdn == '{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/data/migrations/'"
# immich thumbnail request 404 error false positive
- "evt.Meta.target_fqdn == '{{ services['immich']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/api/assets/' && evt.Meta.http_path contains '/thumbnail'"
# opencloud chunk request false positive
- "evt.Meta.target_fqdn == '{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/js/chunks/'"
# nextcloud thumbnail/preview request error false positive
- "evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path startsWith '/index.php/core/preview?'"
# nextcloud chunks.mjs request false positive
- "evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains 'chunk.mjs'"
{% endif %}
View File
@@ -13,7 +13,7 @@ PublishPort={{ services['actualbudget']['ports']['http'] }}:5006
Volume=%h/data/containers/actual-budget:/data:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="ACTUAL_OPENID_DISCOVERY_URL=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}/.well-known/openid-configuration"
Environment="ACTUAL_OPENID_CLIENT_ID=actual-budget"
Environment="ACTUAL_OPENID_SERVER_HOSTNAME=https://{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}"
@@ -0,0 +1,26 @@
---
identity_providers:
oidc:
clients:
# https://www.authelia.com/integration/openid-connect/clients/actual-budget/
- client_id: 'actual-budget'
client_name: 'Actual Budget'
client_secret: 'secret'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://actualbudget.example.com/openid/callback'
scopes:
- 'openid'
- 'profile'
- 'groups'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
@@ -0,0 +1,6 @@
name: crowdsecurity/whitelists
description: "Local whitelist policy"
whitelist:
expression:
# budget local-first sql scrap rule
- "evt.Meta.target_fqdn == '{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/data/migrations/'"
@@ -0,0 +1,13 @@
---
services:
actualbudget:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "101000"
version:
containers:
actualbudget: "26.3.0"
@@ -0,0 +1,5 @@
---
actualbudget:
oidc:
secret: ""
hash: ""
@@ -0,0 +1,25 @@
---
identity_providers:
oidc:
clients:
# https://www.authelia.com/integration/openid-connect/clients/ezbookkeeping/
- client_id: 'ezbookkeeping'
client_name: 'ezBookkeeping'
client_secret: 'hash'
public: false
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'https://ezbookkeeping.example.com/oauth2/callback'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
@@ -18,7 +18,7 @@ Volume=%h/data/containers/ezbookkeeping/data:/data:rw
Volume=%h/containers/ezbookkeeping/ssl:/etc/ssl/ezbookkeeping:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="EBK_SERVER_DOMAIN={{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }}"
Environment="EBK_SERVER_ROOT_URL=https://{{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }}/"
Environment="EBK_LOG_MODE=console"
@@ -58,4 +58,4 @@ RestartSec=10s
TimeoutStopSec=120
[Install]
WantedBy=default.target
WantedBy=default.target
@@ -0,0 +1,13 @@
---
services:
ezbookkeeping:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "100999"
version:
containers:
ezbookkeeping: "1.4.0"
@@ -0,0 +1,8 @@
---
postgresql:
password:
ezbookkeeping: ""
ezbookkeeping:
oidc:
secret: ""
hash: ""
@@ -0,0 +1,110 @@
---
identity_providers:
oidc:
# For the app which doesn't use secret.
cors:
endpoints:
- 'authorization'
- 'token'
- 'revocation'
- 'introspection'
- 'userinfo'
allowed_origins:
- 'https://opencloud.example.com'
allowed_origins_from_client_redirect_uris: true
clients:
# OpenCloud configuration
## https://docs.opencloud.eu/docs/admin/configuration/authentication-and-user-management/external-idp/
## Web
- client_id: 'opencloud'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'https://opencloud.example.com/'
- 'https://opencloud.example.com/oidc-callback.html'
- 'https://opencloud.example.com/oidc-silent-redirect.html'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## desktop
- client_id: 'OpenCloudDesktop'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'http://localhost'
- 'http://127.0.0.1'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## Android
- client_id: 'OpenCloudAndroid'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'oc://android.opencloud.eu'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
## IOS
- client_id: 'OpenCloudIOS'
client_name: 'OpenCloud'
public: true
authorization_policy: 'one_factor'
require_pkce: true
pkce_challenge_method: 'S256'
redirect_uris:
- 'oc://ios.opencloud.eu'
scopes:
- 'openid'
- 'profile'
- 'email'
- 'groups'
- 'offline_access'
response_types:
- 'code'
grant_types:
- 'authorization_code'
- 'refresh_token'
access_token_signed_response_alg: 'RS256'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'none'
@@ -0,0 +1,6 @@
name: crowdsecurity/whitelists
description: "Local whitelist policy"
whitelist:
expression:
# opencloud chunk request false positive
- "evt.Meta.target_fqdn == '{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/js/chunks/'"
@@ -35,4 +35,4 @@ directives:
- '''unsafe-inline'''
worker-src:
- '''self'''
- 'blob:'
- 'blob:'
@@ -0,0 +1,13 @@
---
services:
opencloud:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "100999"
version:
containers:
opencloud: "4.0.6"
@@ -15,7 +15,7 @@ Volume=%h/containers/opencloud:/etc/opencloud:rw
Volume=%h/data/containers/opencloud:/var/lib/opencloud:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Log level info
Environment="OC_LOG_LEVEL=info"
# TLS configuration
@@ -0,0 +1,3 @@
---
opencloud:
admin: ""
@@ -0,0 +1,36 @@
---
identity_providers:
oidc:
claims_policies:
# trilium expects name/email value in id token, but authelia doesn't send it basically
trilium:
id_token:
- email
- email_verified
- preferred_username
- name
clients:
# https://www.authelia.com/integration/openid-connect/clients/trillium/
# The name is trilium, not trillium
- client_id: 'trilium'
client_name: 'Trilium Notes'
client_secret: 'hash'
public: false
authorization_policy: 'one_factor'
# claims policy above
claims_policy: 'trilium'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://trilium.example.com/callback'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
@@ -0,0 +1,13 @@
---
services:
trilium:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "100999"
version:
containers:
trilium: "v0.102.2"
@@ -0,0 +1,6 @@
---
trilium:
admin: ""
oidc:
secret: ""
hash: ""
@@ -18,7 +18,7 @@ PublishPort={{ services['trilium']['ports']['http'] }}:8080/tcp
Volume=%h/data/containers/trilium/data:/home/node/trilium-data:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="TRILIUM_DATA_DIR=/home/node/trilium-data"
Environment="TRILIUM_NO_UPLOAD_LIMIT=true"
@@ -0,0 +1,25 @@
---
identity_providers:
oidc:
clients:
# https://www.authelia.com/integration/openid-connect/clients/vikunja/
- client_id: 'vikunja'
client_name: 'Vikunja'
client_secret: 'hash'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
- 'https://vikunja.example.com/auth/openid/authelia'
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_basic'
@@ -0,0 +1,13 @@
---
services:
vikunja:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "100999"
version:
containers:
vikunja: "2.2.2"
@@ -0,0 +1,9 @@
---
postgresql:
password:
vikunja: ""
vikunja:
session_secret: ""
oidc:
secret: ""
hash: ""
@@ -18,9 +18,9 @@ Volume=%h/data/containers/vikunja:/app/vikunja/files:rw
Volume=%h/containers/vikunja/ssl:/etc/ssl/vikunja:ro
# General
Environment="TZ=Asia/Seoul"
Environment="VIKUNJA_DEFAULTSETTINGS_TIMEZONE=Asia/Seoul"
Environment="VIKUNJA_SERVICE_TIMEZONE=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="VIKUNJA_DEFAULTSETTINGS_TIMEZONE={{ timezone }}"
Environment="VIKUNJA_SERVICE_TIMEZONE={{ timezone }}"
Environment="VIKUNJA_SERVICE_PUBLICURL=https://{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }}"
Environment="VIKUNJA_SERVICE_ENABLEREGISTRATION=false"
Secret=VIKUNJA_SERVICE_JWTSECRET,type=env
@@ -0,0 +1,26 @@
---
identity_providers:
oidc:
clients:
# https://www.authelia.com/integration/openid-connect/clients/wikijs/
- client_id: 'wikijs'
client_name: 'Wiki'
client_secret: 'hash'
public: false
authorization_policy: 'one_factor'
require_pkce: false
pkce_challenge_method: ''
redirect_uris:
# add Callback URL / Redirect URI HERE
- 'https://wikijs.example.com/login/$UUID/callback' # Note this must be copied during step 7 of the Application configuration.
scopes:
- 'openid'
- 'profile'
- 'email'
response_types:
- 'code'
grant_types:
- 'authorization_code'
access_token_signed_response_alg: 'none'
userinfo_signed_response_alg: 'none'
token_endpoint_auth_method: 'client_secret_post'
@@ -0,0 +1,13 @@
---
services:
wikijs:
domain:
public: ""
internal: ""
ports:
http: ""
subuid: "100999"
version:
containers:
wikijs: "2.5.314"
@@ -0,0 +1,9 @@
---
postgresql:
password:
wikijs: ""
wikijs:
admin: ""
oidc:
secret: ""
hash: ""
@@ -19,7 +19,7 @@ Volume=%h/data/containers/wikijs/export:/wiki/export:rw
Volume=%h/containers/wikijs/ssl:/etc/ssl/wiki:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Database
Environment="DB_TYPE=postgres"

Some files were not shown because too many files have changed in this diff Show More