Compare commits

..

12 Commits

Author SHA1 Message Date
il a7e2320b21 chore(script): archive an extract_secret.sh script
archived stack: extract_secret.sh
2026-05-15 09:19:59 +09:00
il 24c83029e9 refactor(playbook): update convention and remove deprecated tag
update notes:
- remove tags 'update', 'upgrade' from convention.yaml
- remove tags 'update' from playbooks/app/site.yaml
2026-05-15 09:04:51 +09:00
il ac64b3c04e docs(readme): add RPO on readme 2026-05-13 17:12:59 +09:00
il 26d696f813 refactor(all): update hardcoded internal domain to ansible variable 2026-05-12 08:08:04 +09:00
il 1096981ef2 feat(paperless): change paperless OCR engine model from tesseract_fast to tesseract_best 2026-05-12 08:00:37 +09:00
il e1936b494d fix(crowdsec): update whitelist.yaml to prevent false positive
false positive:
- nextcloud chunk problem (crowdsecurity/http-crawl-non_statics)
- change expression 'chunks.mjs' to 'chunk.mjs'
2026-05-11 19:40:50 +09:00
il 0afc841b69 chore(chromium): archive a removed stack from console
archived stack: chromium
2026-05-11 19:37:25 +09:00
il a39122eb4b fix(crowdsec): update whitelist.yaml to prevent false positive
false positive:
- nextcloud chunk problem (crowdsecurity/http-crawl-non_statics)
2026-05-11 19:34:22 +09:00
il 0f4da0bb53 feat(backup): add archiving of runtime binary packages 2026-05-11 01:37:15 +09:00
il 1dd1c53e2a feat(backup): add archiving of deployed container images 2026-05-11 00:52:28 +09:00
il 530407c162 refactor(all): update hardcoded timezone 'Asia/Seoul' to ansible variable 'timezone' 2026-05-10 18:44:28 +09:00
il 11ab2f5205 fix(sure): correct task name and subuid variable reference 2026-05-10 14:39:54 +09:00
64 changed files with 552 additions and 82 deletions
+2
View File
@@ -2,6 +2,8 @@
data/bin/*
data/volumes/*
data/images/*
!data/images/containers
data/images/containers/*
docs/archives/textfiles/
docs/notes/*
*.sql
+10 -1
View File
@@ -2,7 +2,16 @@
This homelab project implements a single-node on-premise IaaS system. The homelab contains virtual machines which are divided by their roles, such as private firewall, DNS, PKI, LDAP and database, and SSO (OIDC). A standard domain is used to implement this system without specific vendors. All components are defined as code and initiated by IaC (Ansible), except the hypervisor's initial configuration.
## RTO times
## RTO and RPO
### RPO
- Each backup guarantees a 24-hour RPO
- DB dumps are backed up at 12:00 AM
- Stateful data in app vm is backed up at 03:00 AM
- The maximum inconsistency window between DB dumps and stateful data can be 27 hours, due to the different backup times (24 hours between runs plus the 3-hour offset between the two schedules).
### RTO
- Feb/25/2026 - Reprovisioning Hypervisor and vms
- RTO: 1 hour 30 min - verified
- Manual install and set vmm: 20 min
+1 -2
View File
@@ -33,7 +33,6 @@
tags:
- "always"
- "init"
- "upgrade"
- "update"
- "[service_name]"
# when: "'tags' is not in ansible_run_tags"
+1
View File
@@ -1,6 +1,7 @@
---
# Global vars
ansible_ssh_private_key_file: "/etc/secrets/{{ hostvars['console']['node']['uid'] }}/id_console"
timezone: "Asia/Seoul"
# CA
root_cert_filename: "ilnmors_root_ca.crt"
+4 -4
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
@@ -142,8 +142,8 @@
name: "common"
tasks_from: "services/set_alloy"
apply:
tags: ["init", "update", "alloy"]
tags: ["init", "update", "alloy"]
tags: ["init", "alloy"]
tags: ["init", "alloy"]
- name: Set kopia
ansible.builtin.include_role:
+2 -2
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+2 -2
View File
@@ -24,9 +24,9 @@
tasks:
# init
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+4 -4
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
@@ -162,8 +162,8 @@
name: "fw"
tasks_from: "services/set_bind"
apply:
tags: ["init", "update", "bind"]
tags: ["init", "update", "bind"]
tags: ["init", "bind"]
tags: ["init", "bind"]
- name: Set blocky
ansible.builtin.include_role:
+2 -2
View File
@@ -23,9 +23,9 @@
tags: ["always"]
tasks:
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
+2 -2
View File
@@ -30,9 +30,9 @@
tags: ["always"]
tasks:
# init
- name: Set timezone to Asia/Seoul
- name: Set timezone
community.general.timezone:
name: Asia/Seoul
name: "{{ timezone }}"
become: true
tags: ["init", "timezone"]
@@ -161,3 +161,38 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/manticoresearch/manticore:{{ version['containers']['manticore'] }}"
file: "docker.io_manticoresearch_manticore_{{ version['containers']['manticore'] }}"
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/toeverything/affine:{{ version['containers']['affine'] }}"
file: "ghcr.io_toeverything_affine_{{ version['containers']['affine'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -15,3 +15,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/collabora/code:{{ version['containers']['collabora'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_collabora_code_{{ version['containers']['collabora'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -49,3 +49,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/gitea/gitea:{{ version['containers']['gitea'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_gitea_gitea_{{ version['containers']['gitea'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -118,3 +118,38 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/immich-app/immich-machine-learning:{{ version['containers']['immich'] }}-openvino"
file: "ghcr.io_immich-app_immich-machine-learning_{{ version['containers']['immich'] }}-openvino"
- image: "ghcr.io/immich-app/immich-server:{{ version['containers']['immich'] }}"
file: "ghcr.io_immich-app_immich-server_{{ version['containers']['immich'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -174,3 +174,36 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "docker.io/library/nextcloud:{{ version['containers']['nextcloud'] }}"
file: "docker.io_library_nextcloud_{{ version['containers']['nextcloud'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -57,8 +57,16 @@
- "data/containers/paperless/consume"
- "containers/paperless"
- "containers/paperless/ssl"
- "containers/paperless/build"
become: true
- name: Deploy containerfile for build
ansible.builtin.template:
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/paperless/build/paperless.containerfile.j2"
dest: "{{ node['home_path'] }}/containers/paperless/build/Containerfile"
owner: "{{ ansible_user }}"
group: "svadmins"
mode: "0640"
- name: Deploy root certificate
ansible.builtin.copy:
@@ -72,6 +80,18 @@
notify: "notification_restart_paperless"
no_log: true
- name: Build paperless container image
containers.podman.podman_image:
name: "{{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx"
# check tags from container file
tag: "{{ version['containers']['paperless'] }}"
state: "build"
path: "{{ node['home_path'] }}/containers/paperless/build"
- name: Prune paperless dangling images
containers.podman.podman_prune:
image: true
- name: Register secret value to podman secret
containers.podman.podman_secret:
name: "{{ item.name }}"
@@ -122,3 +142,36 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "{{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx:{{ version['containers']['paperless'] }}"
file: "{{ domain['internal'] }}_{{ node['name'] }}_paperless-ngx_{{ version['containers']['paperless'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
+35 -2
View File
@@ -63,7 +63,7 @@
content: |
{{ hostvars['console']['ca']['root']['crt'] }}
dest: "{{ node['home_path'] }}/containers/sure/ssl/{{ root_cert_filename }}"
owner: "{{ services['paperless']['subuid'] }}"
owner: "{{ services['sure']['subuid'] }}"
group: "svadmins"
mode: "0440"
become: true
@@ -98,7 +98,7 @@
- "sure-worker.container"
notify: "notification_restart_sure"
- name: Enable paperless.service
- name: Enable sure.service
ansible.builtin.systemd:
name: "{{ item }}"
state: "started"
@@ -108,3 +108,36 @@
loop:
- "sure-web.service"
- "sure-worker.service"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ item.file }}.tar"
loop:
- image: "docker.io/library/redis:{{ version['containers']['redis'] }}"
file: "docker.io_library_redis_{{ version['containers']['redis'] }}"
- image: "ghcr.io/we-promise/sure:{{ version['containers']['sure'] }}"
file: "ghcr.io_we-promise_sure_{{ version['containers']['sure'] }}"
loop_control:
label: "{{ item.file }}"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ item.item.image }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
format: "oci-archive"
force: false
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
when: not item.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ item.item.file }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
loop: "{{ container_archive_images.results }}"
loop_control:
label: "{{ item.item.file }}"
@@ -55,3 +55,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/vaultwarden/server:{{ version['containers']['vaultwarden'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_vaultwarden_server_{{ version['containers']['vaultwarden'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -76,3 +76,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/authelia/authelia:{{ version['containers']['authelia'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_authelia_authelia_{{ version['containers']['authelia'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -74,3 +74,10 @@
enabled: true
daemon_reload: true
become: true
- name: Fetch deb bin file
ansible.builtin.fetch:
src: "/var/cache/apt/archives/alloy-{{ version['packages']['alloy'] }}.deb"
dest: "{{ hostvars['console']['node']['data_path'] }}/bin/"
flat: true
become: true
@@ -97,3 +97,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ domain['internal'] }}/{{ node['name'] }}/caddy:{{ version['containers']['caddy'] }}"
dest: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/{{ domain['internal'] }}_{{ node['name'] }}_caddy_{{ version['containers']['caddy'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -24,6 +24,17 @@
mode: "0770"
when: node['name'] == "app"
- name: Create container image archive directory
ansible.builtin.file:
path: "{{ item }}"
owner: "{{ ansible_user }}"
group: "svadmins"
state: "directory"
mode: "0700"
loop:
- "{{ node['home_path'] }}/archives"
- "{{ node['home_path'] }}/archives/containers"
- name: Install podman and reset ssh connection for initiating
when: is_podman_installed.rc != 0
become: true
-7
View File
@@ -1,8 +1 @@
---
- name: Register font
ansible.builtin.shell: |
fc-cache -f -v
become: true
changed_when: false
listen: "notification_update_font"
ignore_errors: true # noqa: ignore-errors
+11 -21
View File
@@ -41,7 +41,7 @@
ansible.builtin.get_url:
url: "https://github.com/0xERR0R/blocky/releases/download/v{{ version['packages']['blocky'] }}/\
blocky_v{{ version['packages']['blocky'] }}_Linux_x86_64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-x86_64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
owner: "blocky"
group: "blocky"
mode: "0600"
@@ -52,16 +52,16 @@
ansible.builtin.get_url:
url: "https://github.com/0xERR0R/blocky/releases/download/v{{ version['packages']['blocky'] }}/\
blocky_v{{ version['packages']['blocky'] }}_Linux_arm64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-arm64.tar.gz"
dest: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
owner: "blocky"
group: "blocky"
mode: "0600"
become: true
when: ansible_facts['architecture'] == "aarch64"
- name: Deploy blocky binary file (x86_64)
- name: Deploy blocky binary file
ansible.builtin.unarchive:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-x86_64.tar.gz"
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
remote_src: true
dest: "/usr/local/bin/"
owner: "root"
@@ -72,23 +72,6 @@
- "--wildcards"
- "blocky"
become: true
when: ansible_facts['architecture'] == "x86_64"
notify: "notification_restart_blocky"
- name: Deploy blocky binary file (aarch64)
ansible.builtin.unarchive:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}-arm64.tar.gz"
remote_src: true
dest: "/usr/local/bin/"
owner: "root"
group: "root"
mode: "0755"
extra_opts:
- "--strip-components=0"
- "--wildcards"
- "blocky"
become: true
when: ansible_facts['architecture'] == "aarch64"
notify: "notification_restart_blocky"
- name: Deploy blocky config
@@ -141,3 +124,10 @@
enabled: true
daemon_reload: true
become: true
- name: Fetch deb bin file
ansible.builtin.fetch:
src: "/home/blocky/bin/blocky-{{ version['packages']['blocky'] }}.tar.gz"
dest: "{{ hostvars['console']['node']['data_path'] }}/bin/"
flat: true
become: true
@@ -78,3 +78,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/smallstep/step-ca:{{ version['containers']['step'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_smallstep_step-ca_{{ version['containers']['step'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -83,3 +83,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/grafana/grafana:{{ version['containers']['grafana'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_grafana_{{ version['containers']['grafana'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -75,7 +75,7 @@
rm: true
detach: false
env:
TZ: "Asia/Seoul"
TZ: "{{ timezone }}"
LLDAP_LDAP_BASE_DN: "{{ domain['dc'] }}"
secrets:
- "LLDAP_DATABASE_URL,type=env"
@@ -108,3 +108,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/lldap/lldap:{{ version['containers']['ldap'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_lldap_lldap_{{ version['containers']['ldap'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -64,3 +64,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/grafana/loki:{{ version['containers']['loki'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_grafana_loki_{{ version['containers']['loki'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -172,3 +172,29 @@
daemon_reload: true
scope: "user"
loop: "{{ connected_services }}"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "{{ domain['internal'] }}/{{ node['name'] }}/postgres:pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}"
dest: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/\
{{ domain['internal'] }}_{{ node['name'] }}_postgres_\
pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -68,3 +68,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/prom/prometheus:{{ version['containers']['prometheus'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_prom_prometheus_{{ version['containers']['prometheus'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -68,3 +68,23 @@
enabled: true
daemon_reload: true
scope: "user"
- name: Check container archive images
ansible.builtin.stat:
path: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
register: container_archive_images
- name: Save container archive images
containers.podman.podman_save:
image:
- "docker.io/enix/x509-certificate-exporter:{{ version['containers']['x509-exporter'] }}"
dest: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
format: "oci-archive"
force: false
when: not container_archive_images.stat.exists
- name: Fetch container archive images
ansible.builtin.fetch:
src: "{{ node['home_path'] }}/archives/containers/docker.io_enix_x509-certificate-exporter_{{ version['containers']['x509-exporter'] }}.tar"
dest: "{{ hostvars['console']['node']['data_path'] }}/images/containers/"
flat: true
@@ -19,7 +19,7 @@ Volume=%h/containers/affine/config:/root/.affine/config
Volume=%h/containers/affine/ssl:/etc/ssl/affine:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
## OIDC callback URIs
Environment="AFFINE_SERVER_HOST={{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
Environment="AFFINE_SERVER_EXTERNAL_URL=https://{{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
@@ -11,7 +11,7 @@ HostName=collabora
PublishPort={{ services['collabora']['ports']['http'] }}:9980/tcp
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="aliasgroup1=https://{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}"
# Environment="aliasgroup2=other_server_FQDN"
Environment="extra_params=--o:ssl.enable=false --o:ssl.termination=true --o:server_name={{ services['collabora']['domain']['public'] }}.{{ domain['public'] }} --o:admin_console.enable=false"
@@ -19,7 +19,7 @@ Volume=%h/data/containers/gitea:/data:rw
Volume=%h/containers/gitea/ssl:/etc/ssl/gitea:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="GITEA__server__DISABLE_SSH=true"
# Database
Environment="GITEA__database__DB_TYPE=postgres"
@@ -21,7 +21,7 @@ PodmanArgs=--group-add keep-groups
Volume=%h/containers/immich/ml/cache:/cache:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -24,7 +24,7 @@ Volume=%h/data/containers/immich:/data:rw
Volume=%h/containers/immich/ssl:/etc/ssl/immich:ro
# Environment
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# The new environment from version 2.7.0 to enable CSP
Environment="IMMICH_HELMET_FILE=true"
@@ -14,7 +14,7 @@ PublishPort={{ services[manticore_service]['ports']['manticore'] }}:9308
Volume=%h/data/containers/manticore/{{ manticore_service }}:/var/lib/manticore:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -17,7 +17,7 @@ Volume=%h/containers/nextcloud/ini/upload.ini:/usr/local/etc/php/conf.d/upload.i
Volume=%h/data/containers/nextcloud/html:/var/www/html:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# PostgreSQL
Environment="PGSSLMODE=verify-full"
Environment="PGSSLROOTCERT=/etc/ssl/nextcloud/{{ root_cert_filename }}"
@@ -0,0 +1,13 @@
FROM ghcr.io/paperless-ngx/paperless-ngx:{{ version['containers']['paperless'] }}
USER root
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl ca-certificates \
&& curl -fsSL https://raw.githubusercontent.com/tesseract-ocr/tessdata_best/main/kor.traineddata \
-o /usr/share/tesseract-ocr/5/tessdata/kor.traineddata \
&& curl -fsSL https://raw.githubusercontent.com/tesseract-ocr/tessdata_best/main/eng.traineddata \
-o /usr/share/tesseract-ocr/5/tessdata/eng.traineddata \
&& rm -rf /var/lib/apt/lists/*
USER paperless
@@ -8,7 +8,7 @@ After=redis_paperless.service
Wants=redis_paperless.service
[Container]
Image=ghcr.io/paperless-ngx/paperless-ngx:{{ version['containers']['paperless'] }}
Image={{ domain['internal'] }}/{{ node['name'] }}/paperless-ngx:{{ version['containers']['paperless'] }}
ContainerName=paperless
HostName=paperless
PublishPort={{ services['paperless']['ports']['http'] }}:8000/tcp
@@ -20,8 +20,8 @@ Volume=%h/data/containers/paperless/consume:/usr/src/paperless/consume:rw
Volume=%h/containers/paperless/ssl:/etc/ssl/paperless:ro
# General
Environment="TZ=Asia/Seoul"
Environment="PAPERLESS_TIME_ZONE=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="PAPERLESS_TIME_ZONE={{ timezone }}"
Environment="PAPERLESS_URL=https://{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }}"
Environment="PAPERLESS_OCR_LANGUAGE=kor+eng"
Environment="PAPERLESS_OCR_LANGUAGES=kor"
@@ -20,7 +20,7 @@ Volume=%h/containers/redis/{{ redis_service }}/redis.conf:/usr/local/etc/redis/r
Exec=redis-server /usr/local/etc/redis/redis.conf
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
[Service]
Restart=always
@@ -18,7 +18,7 @@ Volume=%h/data/containers/sure/storage:/rails/storage:rw
Volume=%h/containers/sure/ssl:/etc/ssl/sure:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="SELF_HOSTED=true"
Environment="ONBOARDING_STATE=closed"
Environment="RAILS_FORCE_SSL=false"
@@ -18,7 +18,7 @@ Volume=%h/containers/sure/ssl:/etc/ssl/sure:ro
Exec=bundle exec sidekiq
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="SELF_HOSTED=true"
Environment="ONBOARDING_STATE=closed"
Environment="RAILS_FORCE_SSL=false"
@@ -18,7 +18,7 @@ PublishPort={{ services['vaultwarden']['ports']['http'] }}:80/tcp
Volume=%h/data/containers/vaultwarden:/data:rw
Volume=%h/containers/vaultwarden/ssl:/etc/ssl/vaultwarden:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="DOMAIN=https://{{ services['vaultwarden']['domain']['public'] }}.{{ domain['public'] }}"
Environment="SIGNUPS_ALLOWED=false"
Secret=VW_ADMIN_TOKEN,type=env,target=ADMIN_TOKEN
@@ -22,7 +22,7 @@ Volume=%h/containers/authelia/config:/config:rw
Volume=%h/containers/authelia/certs:/etc/ssl/authelia:ro
# Default
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Enable Go template engine
# !CAUTION!
{% raw %}# If this environment were enabled, you would have to use {{/* ... */}} for {{ go_filter }} options. Go engine always processes its own grammar first.
@@ -33,7 +33,7 @@ Volume=%h/containers/caddy/data:/data:rw
Volume=/var/log/caddy:/log:rw
{% endif %}
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Secret=CADDY_ACME_KEY,target=/run/secrets/CADDY_ACME_KEY
{% if node['name'] == 'auth' %}
@@ -21,7 +21,7 @@ Volume=%h/containers/ca/config:/home/step/config:rw
Volume=%h/containers/ca/db:/home/step/db:rw
Volume=%h/containers/ca/templates:/home/step/templates:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Since 0.30.0, Docker CMD no longer expands PWDPATH.
#Environment="PWDPATH=/run/secrets/STEP_CA_PASSWORD"
@@ -24,7 +24,7 @@ Volume=%h/containers/grafana/data:/var/lib/grafana:rw
Volume=%h/containers/grafana/etc:/etc/grafana:ro
Volume=%h/containers/grafana/ssl:/etc/ssl/grafana:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="GF_PATHS_CONFIG=/etc/grafana/grafana.ini"
# plugin
# Environment="GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource"
@@ -24,7 +24,7 @@ Volume=%h/containers/ldap/data:/data:rw
Volume=%h/containers/ldap/ssl:/etc/ssl/ldap:ro
# Default
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Domain
Environment="LLDAP_LDAP_BASE_DN={{ domain['dc'] }}"
@@ -19,7 +19,7 @@ Volume=%h/containers/loki/data:/loki:rw
Volume=%h/containers/loki/etc:/etc/loki:ro
Volume=%h/containers/loki/ssl:/etc/ssl/loki:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Exec=--config.file=/etc/loki/loki.yaml
@@ -21,7 +21,7 @@ Volume=%h/containers/postgresql/ssl:/etc/ssl/postgresql:ro
Volume=%h/containers/postgresql/init:/docker-entrypoint-initdb.d/:ro
Volume=%h/containers/postgresql/backups:/backups:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# This option is only for init process, after init custom config file `pg_hba.conf` will control this option.
Environment="POSTGRES_HOST_AUTH_METHOD=trust"
@@ -19,7 +19,7 @@ Volume=%h/containers/prometheus/data:/prometheus:rw
Volume=%h/containers/prometheus/etc:/etc/prometheus:ro
Volume=%h/containers/prometheus/ssl:/etc/ssl/prometheus:ro
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Exec=--config.file=/etc/prometheus/prometheus.yaml \
--web.config.file=/etc/prometheus/web-config.yaml \
@@ -16,4 +16,6 @@ whitelist:
- "evt.Meta.target_fqdn == '{{ services['immich']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains '/api/assets/' && evt.Meta.http_path contains '/thumbnail'"
# nextcloud thumbnail/preview request error false positive
- "evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path startsWith '/index.php/core/preview?'"
# nextcloud chunk.mjs request false positive (matches e.g. '*-chunk.mjs' and 'chunks.mjs')
- "evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains 'chunk.mjs'"
{% endif %}
View File
@@ -13,7 +13,7 @@ PublishPort={{ services['actualbudget']['ports']['http'] }}:5006
Volume=%h/data/containers/actual-budget:/data:rw
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="ACTUAL_OPENID_DISCOVERY_URL=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}/.well-known/openid-configuration"
Environment="ACTUAL_OPENID_CLIENT_ID=actual-budget"
Environment="ACTUAL_OPENID_SERVER_HOSTNAME=https://{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}"
@@ -18,7 +18,7 @@ Volume=%h/data/containers/ezbookkeeping/data:/data:rw
Volume=%h/containers/ezbookkeeping/ssl:/etc/ssl/ezbookkeeping:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="EBK_SERVER_DOMAIN={{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }}"
Environment="EBK_SERVER_ROOT_URL=https://{{ services['ezbookkeeping']['domain']['public'] }}.{{ domain['public'] }}/"
Environment="EBK_LOG_MODE=console"
@@ -15,7 +15,7 @@ Volume=%h/containers/opencloud:/etc/opencloud:rw
Volume=%h/data/containers/opencloud:/var/lib/opencloud:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Log level info
Environment="OC_LOG_LEVEL=info"
# TLS configuration
@@ -18,7 +18,7 @@ PublishPort={{ services['trilium']['ports']['http'] }}:8080/tcp
Volume=%h/data/containers/trilium/data:/home/node/trilium-data:rw
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="TRILIUM_DATA_DIR=/home/node/trilium-data"
Environment="TRILIUM_NO_UPLOAD_LIMIT=true"
@@ -18,9 +18,9 @@ Volume=%h/data/containers/vikunja:/app/vikunja/files:rw
Volume=%h/containers/vikunja/ssl:/etc/ssl/vikunja:ro
# General
Environment="TZ=Asia/Seoul"
Environment="VIKUNJA_DEFAULTSETTINGS_TIMEZONE=Asia/Seoul"
Environment="VIKUNJA_SERVICE_TIMEZONE=Asia/Seoul"
Environment="TZ={{ timezone }}"
Environment="VIKUNJA_DEFAULTSETTINGS_TIMEZONE={{ timezone }}"
Environment="VIKUNJA_SERVICE_TIMEZONE={{ timezone }}"
Environment="VIKUNJA_SERVICE_PUBLICURL=https://{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }}"
Environment="VIKUNJA_SERVICE_ENABLEREGISTRATION=false"
Secret=VIKUNJA_SERVICE_JWTSECRET,type=env
@@ -19,7 +19,7 @@ Volume=%h/data/containers/wikijs/export:/wiki/export:rw
Volume=%h/containers/wikijs/ssl:/etc/ssl/wiki:ro
# General
Environment="TZ=Asia/Seoul"
Environment="TZ={{ timezone }}"
# Database
Environment="DB_TYPE=postgres"
@@ -148,4 +148,4 @@ if [ "$TYPE" == "ENV" ]; then
log "error" "SOPS extract error"
exit 1
fi
fi
fi
+4 -1
View File
@@ -23,11 +23,14 @@
- 2026-05-03: Make previous expressions annotation
- 2026-05-07: Find the false positive case, which is not on `crowdsecurity/nextcloud-whitelist`
- 2026-05-07: Set whitelist expression
- 2026-05-11: Find the false positive case, which is not on `crowdsecurity/nextcloud-whitelist`
- 2026-05-11: Set whitelist expression
## Solution
- Install crowdsecurity/nextcloud-whitelist on auth node
- Add expression on whitelist
- evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path startsWith '/index.php/core/preview?'
- evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status == '404' && evt.Meta.http_verb == 'GET' && evt.Meta.http_path startsWith '/index.php/core/preview?'
- evt.Meta.target_fqdn == '{{ services['nextcloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_status in ['200', '304'] && evt.Meta.http_verb == 'GET' && evt.Meta.http_path contains 'chunk.mjs'
### Deprecated solution
- Access to fw
+5
View File
@@ -45,6 +45,11 @@ ALTER DATABASE paperless_db OWNER TO paperless;
- "paperless"
```
### Paperless custom build
- paperless-ngx uses the 'tesseract_fast' model by default
- build a custom container image to use the 'tesseract_best' model, improving OCR accuracy
## Configuration
### Access to paperless