Compare commits
52 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 621d5310a3 | |||
| 6377a56d95 | |||
| dbd72f43a4 | |||
| 9f236b6fa5 | |||
| b4a0874deb | |||
| c51216ff9b | |||
| 7debdfcb93 | |||
| da016343c0 | |||
| bf749ebbde | |||
| 41d509a49d | |||
| f062f6862f | |||
| 2dfc0f734e | |||
| f9211dfa24 | |||
| 8713631e0b | |||
| 01ad4350b0 | |||
| 8a4ce488f1 | |||
| 664cf2956d | |||
| 8c3fe409ae | |||
| 075b796608 | |||
| 0b7d1c4d78 | |||
| 017de863d9 | |||
| b52a6f6f0d | |||
| 84d961c7e3 | |||
| d1e0eb30c0 | |||
| 0f38df0100 | |||
| 7911657c8c | |||
| fd5d0ce4f8 | |||
| 98bc863d08 | |||
| 9137791aac | |||
| f9179282b8 | |||
| 25e33caec9 | |||
| d5090503d8 | |||
| b7a038dcab | |||
| 5f063d82d5 | |||
| 95eff329b6 | |||
| e8f523c2af | |||
| 726c0c3523 | |||
| 8bff16d172 | |||
| fc3b5a1e05 | |||
| 13839c9dfd | |||
| 242b719671 | |||
| 224b27abc3 | |||
| a2022fd14c | |||
| be07698dae | |||
| ab7e09d90b | |||
| 8c81827e24 | |||
| 57996f1efd | |||
| 8311fcf53e | |||
| 90277b2d4e | |||
| 6fc12d0119 | |||
| 4b7ec4d638 | |||
| 6cd26eb7d8 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,3 +1,4 @@
|
||||
.ansible
|
||||
data/bin/*
|
||||
data/volumes/*
|
||||
data/images/*
|
||||
|
||||
@@ -2,50 +2,152 @@
|
||||
# Global vars
|
||||
ansible_ssh_private_key_file: "/etc/secrets/{{ hostvars['console']['node']['uid'] }}/id_console"
|
||||
|
||||
# URL infromation, you can use {{ infra_uri['services'] | split(':') | first|last }} to seperate domain and ports
|
||||
infra_uri:
|
||||
# CA
|
||||
root_cert_filename: "ilnmors_root_ca.crt"
|
||||
intermediate_cert_filename: "ilnmors_intermediate_ca.crt"
|
||||
intermediate_key_filename: "ilnmors_intermediate_ca.key"
|
||||
|
||||
|
||||
# local SAN and SSH SAN should be updated manually on host_vars
|
||||
domain:
|
||||
public: "ilnmors.com"
|
||||
internal: "ilnmors.internal"
|
||||
dc: "dc=ilnmors,dc=internal"
|
||||
org: "ilnmors"
|
||||
|
||||
# DNS configuration including bind and blocky should be set manually.
|
||||
# named.conf.j2 is also set manually.
|
||||
# Check the hosts.j2 when cname records are fixed
|
||||
|
||||
services:
|
||||
crowdsec:
|
||||
domain: "crowdsec.ilnmors.internal"
|
||||
domain: "crowdsec"
|
||||
ports:
|
||||
https: "8080"
|
||||
bind:
|
||||
domain: "bind.ilnmors.internal"
|
||||
domain: "bind"
|
||||
ports:
|
||||
dns: "53"
|
||||
blocky:
|
||||
domain: "blocky.ilnmors.internal"
|
||||
domain: "blocky"
|
||||
ports:
|
||||
https: "443"
|
||||
dns: "53"
|
||||
postgresql:
|
||||
domain: "postgresql.ilnmors.internal"
|
||||
domain: "postgresql"
|
||||
ports:
|
||||
tcp: "5432" # postgresql db connection port
|
||||
subuid: "100998"
|
||||
ldap:
|
||||
domain: "ldap.ilnmors.internal"
|
||||
domain: "ldap"
|
||||
ports:
|
||||
http: "17170"
|
||||
ldaps: "636"
|
||||
ldaps: "6360"
|
||||
subuid: "100999"
|
||||
ca:
|
||||
domain: "ca.ilnmors.internal"
|
||||
domain: "ca"
|
||||
ports:
|
||||
https: "9000"
|
||||
subuid: "100999"
|
||||
x509-exporter:
|
||||
ports:
|
||||
http: "9793"
|
||||
subuid: "165533"
|
||||
prometheus:
|
||||
domain: "prometheus.ilnmors.internal"
|
||||
domain: "prometheus"
|
||||
ports:
|
||||
https: "9090"
|
||||
subuid: "165533"
|
||||
loki:
|
||||
domain: "loki.ilnmors.internal"
|
||||
domain: "loki"
|
||||
ports:
|
||||
https: "3100"
|
||||
subuid: "110000"
|
||||
grafana:
|
||||
domain: "grafana"
|
||||
ports:
|
||||
http: "3000"
|
||||
subuid: "100471"
|
||||
caddy:
|
||||
ports:
|
||||
http: "2080"
|
||||
https: "2443"
|
||||
nas:
|
||||
domain: "nas.ilnmors.internal"
|
||||
domain: "nas"
|
||||
ports:
|
||||
https: "5001"
|
||||
kopia:
|
||||
domain: "nas.ilnmors.internal"
|
||||
domain: "nas"
|
||||
ports:
|
||||
https: "51515"
|
||||
authelia:
|
||||
domain: "authelia"
|
||||
ports:
|
||||
http: "9091"
|
||||
redis:
|
||||
subuid: "100998"
|
||||
vaultwarden:
|
||||
domain:
|
||||
public: "vault"
|
||||
internal: "vault.app"
|
||||
ports:
|
||||
http: "8000"
|
||||
gitea:
|
||||
domain:
|
||||
public: "gitea"
|
||||
internal: "gitea.app"
|
||||
ports:
|
||||
http: "3000"
|
||||
subuid: "100999"
|
||||
immich:
|
||||
domain:
|
||||
public: "immich"
|
||||
internal: "immich.app"
|
||||
ports:
|
||||
http: "2283"
|
||||
redis: "6379"
|
||||
immich-ml:
|
||||
ports:
|
||||
http: "3003"
|
||||
actualbudget:
|
||||
domain:
|
||||
public: "budget"
|
||||
internal: "budget.app"
|
||||
ports:
|
||||
http: "5006"
|
||||
subuid: "101000"
|
||||
paperless:
|
||||
domain:
|
||||
public: "paperless"
|
||||
internal: "paperless.app"
|
||||
ports:
|
||||
http: "8001"
|
||||
redis: "6380"
|
||||
subuid: "100999"
|
||||
vikunja:
|
||||
domain:
|
||||
public: "vikunja"
|
||||
internal: "vikunja.app"
|
||||
ports:
|
||||
http: "3456"
|
||||
subuid: "100999"
|
||||
opencloud:
|
||||
domain:
|
||||
public: "opencloud"
|
||||
internal: "opencloud.app"
|
||||
ports:
|
||||
http: "9200"
|
||||
subuid: "100999"
|
||||
manticore:
|
||||
subuid: "100998"
|
||||
affine:
|
||||
domain:
|
||||
public: "affine"
|
||||
internal: "affine.app"
|
||||
ports:
|
||||
http: "3010"
|
||||
redis: "6381"
|
||||
manticore: "9308"
|
||||
|
||||
version:
|
||||
packages:
|
||||
@@ -54,7 +156,6 @@ version:
|
||||
kopia: "0.22.3"
|
||||
blocky: "0.28.2"
|
||||
alloy: "1.13.0"
|
||||
# telegraf: "1.37.1"
|
||||
containers:
|
||||
# common
|
||||
caddy: "2.10.2"
|
||||
@@ -72,3 +173,14 @@ version:
|
||||
vectorchord: "0.5.3"
|
||||
# Auth
|
||||
authelia: "4.39.15"
|
||||
# App
|
||||
vaultwarden: "1.35.4"
|
||||
gitea: "1.25.5"
|
||||
redis: "8.6.1"
|
||||
immich: "v2.7.5"
|
||||
actualbudget: "26.3.0"
|
||||
paperless: "2.20.13"
|
||||
vikunja: "2.2.2"
|
||||
opencloud: "4.0.4"
|
||||
manticore: "25.0.0"
|
||||
affine: "0.26.3"
|
||||
|
||||
@@ -21,5 +21,6 @@ node:
|
||||
config_path: "{{ node.homelab_path }}/config"
|
||||
ssh_san: "console,console.ilnmors.internal"
|
||||
ssh_users: "vmm,fw,infra,auth,app"
|
||||
local_san: "localhost console.ilnmors.internal"
|
||||
# add the hostname of wsl, it is needed to improve the sudo problem
|
||||
local_san: "localhost console.ilnmors.internal surface"
|
||||
# ansible_python_interpreter: "{{ ansible_playbook_python }}"
|
||||
|
||||
@@ -153,6 +153,79 @@
|
||||
tags: ["site", "kopia"]
|
||||
tags: ["site", "kopia"]
|
||||
|
||||
- name: Set caddy
|
||||
ansible.builtin.include_role:
|
||||
name: "common"
|
||||
tasks_from: "services/set_caddy"
|
||||
apply:
|
||||
tags: ["site", "caddy"]
|
||||
tags: ["site", "caddy"]
|
||||
|
||||
- name: Set vaultwarden
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_vaultwarden"
|
||||
apply:
|
||||
tags: ["site", "vaultwarden"]
|
||||
tags: ["site", "vaultwarden"]
|
||||
|
||||
- name: Set gitea
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_gitea"
|
||||
apply:
|
||||
tags: ["site", "gitea"]
|
||||
tags: ["site", "gitea"]
|
||||
|
||||
- name: Set immich
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_immich"
|
||||
apply:
|
||||
tags: ["site", "immich"]
|
||||
tags: ["site", "immich"]
|
||||
|
||||
- name: Set actual budget
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_actual-budget"
|
||||
apply:
|
||||
tags: ["site", "actual-budget"]
|
||||
tags: ["site", "actual-budget"]
|
||||
|
||||
- name: Set paperless
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_paperless"
|
||||
apply:
|
||||
tags: ["site", "paperless"]
|
||||
tags: ["site", "paperless"]
|
||||
|
||||
- name: Set vikunja
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_vikunja"
|
||||
apply:
|
||||
tags: ["site", "vikunja"]
|
||||
tags: ["site", "vikunja"]
|
||||
|
||||
- name: Set opencloud
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_opencloud"
|
||||
apply:
|
||||
tags: ["site", "opencloud"]
|
||||
tags: ["site", "opencloud"]
|
||||
|
||||
- name: Set affine
|
||||
ansible.builtin.include_role:
|
||||
name: "app"
|
||||
tasks_from: "services/set_affine"
|
||||
apply:
|
||||
tags: ["site", "affine"]
|
||||
tags: ["site", "affine"]
|
||||
|
||||
|
||||
- name: Flush handlers right now
|
||||
ansible.builtin.meta: "flush_handlers"
|
||||
|
||||
|
||||
@@ -115,18 +115,10 @@
|
||||
become: true
|
||||
tags: ["init", "site", "install-packages"]
|
||||
|
||||
- name: Install CLI tools
|
||||
- name: Set CLI tools
|
||||
ansible.builtin.include_role:
|
||||
name: "console"
|
||||
tasks_from: "services/set_cli_tools"
|
||||
apply:
|
||||
tags: ["init", "site", "tools"]
|
||||
tags: ["init", "site", "tools"]
|
||||
|
||||
- name: Install chromium with font
|
||||
ansible.builtin.include_role:
|
||||
name: "console"
|
||||
tasks_from: "services/set_chromium"
|
||||
apply:
|
||||
tags: ["init", "site", "chromium"]
|
||||
tags: ["init", "site", "chromium"]
|
||||
|
||||
101
ansible/roles/app/handlers/main.yaml
Normal file
101
ansible/roles/app/handlers/main.yaml
Normal file
@@ -0,0 +1,101 @@
|
||||
---
|
||||
- name: Restart vaultwarden
|
||||
ansible.builtin.systemd:
|
||||
name: "vaultwarden.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_vaultwarden"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart gitea
|
||||
ansible.builtin.systemd:
|
||||
name: "gitea.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_gitea"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart immich
|
||||
ansible.builtin.systemd:
|
||||
name: "immich.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_immich"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart immich-ml
|
||||
ansible.builtin.systemd:
|
||||
name: "immich-ml.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_immich-ml"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart actual-budget
|
||||
ansible.builtin.systemd:
|
||||
name: "actual-budget.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_actual-budget"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart paperless
|
||||
ansible.builtin.systemd:
|
||||
name: "paperless.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_paperless"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart vikunja
|
||||
ansible.builtin.systemd:
|
||||
name: "vikunja.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
scope: "user"
|
||||
daemon_reload: true
|
||||
changed_when: false
|
||||
listen: "notification_restart_vikunja"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart opencloud
|
||||
ansible.builtin.systemd:
|
||||
name: "opencloud.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_opencloud_init.stat.exists
|
||||
changed_when: false
|
||||
listen: "notification_restart_opencloud"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart affine
|
||||
ansible.builtin.systemd:
|
||||
name: "affine.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_affine_init.stat.exists
|
||||
changed_when: false
|
||||
listen: "notification_restart_affine"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
35
ansible/roles/app/tasks/services/set_actual-budget.yaml
Normal file
35
ansible/roles/app/tasks/services/set_actual-budget.yaml
Normal file
@@ -0,0 +1,35 @@
|
||||
---
|
||||
- name: Create actual budget directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/data/containers/actual-budget"
|
||||
state: "directory"
|
||||
owner: "{{ services['actualbudget']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
become: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "ACTUAL_OPENID_CLIENT_SECRET"
|
||||
data: "{{ hostvars['console']['actualbudget']['oidc']['secret'] }}"
|
||||
state: "present"
|
||||
force: true
|
||||
notify: "notification_restart_actual-budget"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/actual-budget/actual-budget.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/actual-budget.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_actual-budget"
|
||||
|
||||
- name: Enable actual-budget.service
|
||||
ansible.builtin.systemd:
|
||||
name: "actual-budget.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
163
ansible/roles/app/tasks/services/set_affine.yaml
Normal file
163
ansible/roles/app/tasks/services/set_affine.yaml
Normal file
@@ -0,0 +1,163 @@
|
||||
---
|
||||
- name: Set manticore service name
|
||||
ansible.builtin.set_fact:
|
||||
manticore_service: "affine"
|
||||
|
||||
- name: Create manticore directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['manticore']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/manticore"
|
||||
- "data/containers/manticore/{{ manticore_service }}"
|
||||
become: true
|
||||
|
||||
- name: Deploy manticore.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/manticore/manticore.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/manticore_{{ manticore_service }}.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_manticore_containerfile"
|
||||
|
||||
- name: Enable (Restart) manticore.service
|
||||
ansible.builtin.systemd:
|
||||
name: "manticore_{{ manticore_service }}.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_manticore_containerfile.changed # noqa: no-handler
|
||||
|
||||
- name: Set redis service name
|
||||
ansible.builtin.set_fact:
|
||||
redis_service: "affine"
|
||||
|
||||
- name: Create redis_affine directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['redis']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "containers/redis"
|
||||
- "containers/redis/{{ redis_service }}"
|
||||
- "containers/redis/{{ redis_service }}/data"
|
||||
become: true
|
||||
|
||||
- name: Deploy redis config file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.conf.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/redis/{{ redis_service }}/redis.conf"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_conf"
|
||||
|
||||
- name: Deploy redis container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/redis_{{ redis_service }}.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_containerfile"
|
||||
|
||||
- name: Enable (Restart) redis service
|
||||
ansible.builtin.systemd:
|
||||
name: "redis_{{ redis_service }}.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_redis_conf.changed or is_redis_containerfile.changed # noqa: no-handler
|
||||
|
||||
- name: Create affine directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/affine"
|
||||
- "containers/affine"
|
||||
- "containers/affine/ssl"
|
||||
- "containers/affine/config"
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/affine/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
notify: "notification_restart_affine"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "{{ item.name }}"
|
||||
data: "{{ item.value }}"
|
||||
state: "present"
|
||||
force: true
|
||||
loop:
|
||||
- name: "AFFINE_PRIVATE_KEY"
|
||||
value: "{{ hostvars['console']['affine']['secret_key'] }}"
|
||||
- name: "AFFINE_DATABASE_URL"
|
||||
value: "postgresql://affine:{{ hostvars['console']['postgresql']['password']['affine'] | urlencode | replace('/', '%2F') }}\
|
||||
@{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}/affine_db?sslmode=verify-full&\
|
||||
sslrootcert=/etc/ssl/affine/{{ root_cert_filename }}"
|
||||
notify: "notification_restart_affine"
|
||||
no_log: true
|
||||
|
||||
- name: Check data directory empty
|
||||
ansible.builtin.stat:
|
||||
path: "{{ node['home_path'] }}/data/containers/affine/.init"
|
||||
register: "is_affine_init"
|
||||
|
||||
- name: Initialize affine
|
||||
when: not is_affine_init.stat.exists
|
||||
block:
|
||||
- name: Execute init command (Including pulling image)
|
||||
containers.podman.podman_container:
|
||||
name: "affine_init"
|
||||
image: "ghcr.io/toeverything/affine:{{ version['containers']['affine'] }}"
|
||||
command: ['sh', '-c', 'node ./scripts/self-host-predeploy.js']
|
||||
state: "started"
|
||||
rm: true
|
||||
detach: false
|
||||
secrets:
|
||||
- "AFFINE_DATABASE_URL,type=env,target=DATABASE_URL"
|
||||
no_log: true
|
||||
|
||||
- name: Create .init file
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/data/containers/affine/.init"
|
||||
state: "touch"
|
||||
mode: "0644"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
|
||||
- name: Deploy affine.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/affine/affine.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/affine.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_affine"
|
||||
|
||||
- name: Enable affine.service
|
||||
ansible.builtin.systemd:
|
||||
name: "affine.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
51
ansible/roles/app/tasks/services/set_gitea.yaml
Normal file
51
ansible/roles/app/tasks/services/set_gitea.yaml
Normal file
@@ -0,0 +1,51 @@
|
||||
---
|
||||
- name: Create gitea directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['gitea']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/gitea"
|
||||
- "containers/gitea"
|
||||
- "containers/gitea/ssl"
|
||||
become: true
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/gitea/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ services['gitea']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
become: true
|
||||
notify: "notification_restart_gitea"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "GITEA__database__PASSWD"
|
||||
data: "{{ hostvars['console']['postgresql']['password']['gitea'] }}"
|
||||
state: "present"
|
||||
force: true
|
||||
notify: "notification_restart_gitea"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/gitea/gitea.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/gitea.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_gitea"
|
||||
|
||||
- name: Enable gitea.service
|
||||
ansible.builtin.systemd:
|
||||
name: "gitea.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
120
ansible/roles/app/tasks/services/set_immich.yaml
Normal file
120
ansible/roles/app/tasks/services/set_immich.yaml
Normal file
@@ -0,0 +1,120 @@
|
||||
---
|
||||
- name: Set redis service name
|
||||
ansible.builtin.set_fact:
|
||||
redis_service: "immich"
|
||||
|
||||
- name: Create redis_immich directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['redis']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "containers/redis"
|
||||
- "containers/redis/{{ redis_service }}"
|
||||
- "containers/redis/{{ redis_service }}/data"
|
||||
become: true
|
||||
|
||||
- name: Deploy redis config file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.conf.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/redis/{{ redis_service }}/redis.conf"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_conf"
|
||||
|
||||
- name: Deploy redis container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/redis_{{ redis_service }}.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_containerfile"
|
||||
|
||||
- name: Enable (Restart) redis service
|
||||
ansible.builtin.systemd:
|
||||
name: "redis_{{ redis_service }}.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_redis_conf.changed or is_redis_containerfile.changed # noqa: no-handler
|
||||
|
||||
- name: Add user in video, render group
|
||||
ansible.builtin.user:
|
||||
name: "{{ ansible_user }}"
|
||||
state: "present"
|
||||
groups: "video, render"
|
||||
append: true
|
||||
become: true
|
||||
|
||||
- name: Create immich directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/immich"
|
||||
- "containers/immich"
|
||||
- "containers/immich/ssl"
|
||||
- "containers/immich/ml"
|
||||
- "containers/immich/ml/cache"
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/immich/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
notify: "notification_restart_immich"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "IMMICH_DB_PASSWORD"
|
||||
data: "{{ hostvars['console']['postgresql']['password']['immich'] }}"
|
||||
state: "present"
|
||||
force: true
|
||||
notify: "notification_restart_immich"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy immich.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/immich/immich.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/immich.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_immich"
|
||||
|
||||
- name: Deploy immich-ml.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/immich/immich-ml.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/immich-ml.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_immich-ml"
|
||||
|
||||
- name: Enable immich.service
|
||||
ansible.builtin.systemd:
|
||||
name: "immich.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
|
||||
- name: Enable immich-ml.service
|
||||
ansible.builtin.systemd:
|
||||
name: "immich-ml.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
76
ansible/roles/app/tasks/services/set_opencloud.yaml
Normal file
76
ansible/roles/app/tasks/services/set_opencloud.yaml
Normal file
@@ -0,0 +1,76 @@
|
||||
---
|
||||
- name: Create opencloud directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['opencloud']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/opencloud"
|
||||
- "containers/opencloud"
|
||||
become: true
|
||||
|
||||
- name: Check data directory empty
|
||||
ansible.builtin.stat:
|
||||
path: "{{ node['home_path'] }}/data/containers/opencloud/.init"
|
||||
become: true
|
||||
register: "is_opencloud_init"
|
||||
|
||||
- name: Initialize opencloud
|
||||
when: not is_opencloud_init.stat.exists
|
||||
block:
|
||||
- name: Execute init command (Including pulling image)
|
||||
containers.podman.podman_container:
|
||||
name: "opencloud_init"
|
||||
image: "docker.io/opencloudeu/opencloud:{{ version['containers']['opencloud'] }}"
|
||||
command: "init"
|
||||
state: "started"
|
||||
rm: true
|
||||
detach: false
|
||||
env:
|
||||
IDM_ADMIN_PASSWORD: "{{ hostvars['console']['opencloud']['admin']['password'] }}"
|
||||
# Verify the certificate (Opencloud to Authelia, authelia uses let's encrypt.)
|
||||
OC_INSECURE: "true"
|
||||
volume:
|
||||
- "{{ node['home_path'] }}/containers/opencloud:/etc/opencloud:rw"
|
||||
- "{{ node['home_path'] }}/data/containers/opencloud:/var/lib/opencloud:rw"
|
||||
no_log: true
|
||||
|
||||
- name: Create .init file
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/data/containers/opencloud/.init"
|
||||
state: "touch"
|
||||
mode: "0644"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
|
||||
- name: Deploy configuration files
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/opencloud/etc/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/opencloud/{{ item }}"
|
||||
owner: "{{ services['opencloud']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0640"
|
||||
loop:
|
||||
- "csp.yaml"
|
||||
- "proxy.yaml"
|
||||
become: true
|
||||
notify: "notification_restart_opencloud"
|
||||
|
||||
- name: Deploy container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/opencloud/opencloud.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/opencloud.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_opencloud"
|
||||
|
||||
- name: Enable opencloud.service
|
||||
ansible.builtin.systemd:
|
||||
name: "opencloud.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
124
ansible/roles/app/tasks/services/set_paperless.yaml
Normal file
124
ansible/roles/app/tasks/services/set_paperless.yaml
Normal file
@@ -0,0 +1,124 @@
|
||||
---
|
||||
- name: Set redis service name
|
||||
ansible.builtin.set_fact:
|
||||
redis_service: "paperless"
|
||||
|
||||
- name: Create redis_paperless directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['redis']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "containers/redis"
|
||||
- "containers/redis/{{ redis_service }}"
|
||||
- "containers/redis/{{ redis_service }}/data"
|
||||
become: true
|
||||
|
||||
- name: Deploy redis config file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.conf.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/redis/{{ redis_service }}/redis.conf"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_conf"
|
||||
|
||||
- name: Deploy redis container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/redis/redis.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/redis_{{ redis_service }}.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
register: "is_redis_containerfile"
|
||||
|
||||
- name: Enable (Restart) redis service
|
||||
ansible.builtin.systemd:
|
||||
name: "redis_{{ redis_service }}.service"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: is_redis_conf.changed or is_redis_containerfile.changed # noqa: no-handler
|
||||
|
||||
- name: Create paperless directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['paperless']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/paperless"
|
||||
- "data/containers/paperless/data"
|
||||
- "data/containers/paperless/media"
|
||||
- "data/containers/paperless/consume"
|
||||
- "containers/paperless"
|
||||
- "containers/paperless/ssl"
|
||||
become: true
|
||||
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/paperless/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ services['paperless']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
become: true
|
||||
notify: "notification_restart_paperless"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "{{ item.name }}"
|
||||
data: "{{ item.value }}"
|
||||
state: "present"
|
||||
force: true
|
||||
loop:
|
||||
- name: "PAPERLESS_SECRET_KEY"
|
||||
value: "{{ hostvars['console']['paperless']['session_secret'] }}"
|
||||
- name: "PAPERLESS_DBPASS"
|
||||
value: "{{ hostvars['console']['postgresql']['password']['paperless'] }}"
|
||||
- name: "PAPERLESS_SOCIALACCOUNT_PROVIDERS"
|
||||
value: |-
|
||||
{
|
||||
"openid_connect": {
|
||||
"SCOPE": ["openid", "profile", "email"],
|
||||
"OAUTH_PKCE_ENABLED": true,
|
||||
"APPS": [
|
||||
{
|
||||
"provider_id": "authelia",
|
||||
"name": "Authelia",
|
||||
"client_id": "paperless",
|
||||
"secret": "{{ hostvars['console']['paperless']['oidc']['secret'] }}",
|
||||
"settings": {
|
||||
"server_url": "https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}/.well-known/openid-configuration",
|
||||
"token_auth_method": "client_secret_post"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
notify: "notification_restart_paperless"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy paperless.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/paperless/paperless.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/paperless.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_paperless"
|
||||
|
||||
- name: Enable paperless.service
|
||||
ansible.builtin.systemd:
|
||||
name: "paperless.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
57
ansible/roles/app/tasks/services/set_vaultwarden.yaml
Normal file
57
ansible/roles/app/tasks/services/set_vaultwarden.yaml
Normal file
@@ -0,0 +1,57 @@
|
||||
---
|
||||
- name: Create vaultwarden directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/vaultwarden"
|
||||
- "containers/vaultwarden"
|
||||
- "containers/vaultwarden/ssl"
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/vaultwarden/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
become: true
|
||||
notify: "notification_restart_vaultwarden"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "{{ item.name }}"
|
||||
data: "{{ item.value }}"
|
||||
state: "present"
|
||||
force: true
|
||||
loop:
|
||||
- name: "VW_ADMIN_TOKEN"
|
||||
value: "{{ hostvars['console']['vaultwarden']['admin']['hash'] }}"
|
||||
- name: "VW_DATABASE_URL"
|
||||
value: "postgresql://vaultwarden:{{ hostvars['console']['postgresql']['password']['vaultwarden'] | urlencode | replace('/', '%2F') }}\
|
||||
@{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}/vaultwarden_db?sslmode=verify-full&\
|
||||
sslrootcert=/etc/ssl/vaultwarden/{{ root_cert_filename }}"
|
||||
notify: "notification_restart_vaultwarden"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/vaultwarden/vaultwarden.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/vaultwarden.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_vaultwarden"
|
||||
|
||||
- name: Enable vaultwarden.service
|
||||
ansible.builtin.systemd:
|
||||
name: "vaultwarden.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
58
ansible/roles/app/tasks/services/set_vikunja.yaml
Normal file
58
ansible/roles/app/tasks/services/set_vikunja.yaml
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
- name: Create vikunja directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ services['vikunja']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
- "data/containers/vikunja"
|
||||
- "containers/vikunja"
|
||||
- "containers/vikunja/ssl"
|
||||
become: true
|
||||
|
||||
- name: Deploy root certificate
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/vikunja/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ services['vikunja']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
become: true
|
||||
notify: "notification_restart_vikunja"
|
||||
no_log: true
|
||||
|
||||
- name: Register secret value to podman secret
|
||||
containers.podman.podman_secret:
|
||||
name: "{{ item.name }}"
|
||||
data: "{{ item.value }}"
|
||||
state: "present"
|
||||
force: true
|
||||
loop:
|
||||
- name: "VIKUNJA_SERVICE_JWTSECRET"
|
||||
value: "{{ hostvars['console']['vikunja']['session_secret'] }}"
|
||||
- name: "VIKUNJA_DATABASE_PASSWORD"
|
||||
value: "{{ hostvars['console']['postgresql']['password']['vikunja'] }}"
|
||||
- name: "VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_CLIENTSECRET"
|
||||
value: "{{ hostvars['console']['vikunja']['oidc']['secret'] }}"
|
||||
notify: "notification_restart_vikunja"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy vikunja.container file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/app/vikunja/vikunja.container.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/containers/systemd/vikunja.container"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0644"
|
||||
notify: "notification_restart_vikunja"
|
||||
|
||||
- name: Enable vikunja.service
|
||||
ansible.builtin.systemd:
|
||||
name: "vikunja.service"
|
||||
state: "started"
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
@@ -27,7 +27,7 @@
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/authelia/certs/ilnmors_root_ca.crt"
|
||||
dest: "{{ node['home_path'] }}/containers/authelia/certs/{{ root_cert_filename }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
|
||||
@@ -27,14 +27,14 @@
|
||||
listen: "notification_reload_networkctl"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Reload systemd-resolved.service
|
||||
- name: Restart systemd-resolved.service
|
||||
ansible.builtin.systemd:
|
||||
name: "systemd-resolved.service"
|
||||
state: "reloaded"
|
||||
state: "restarted"
|
||||
enabled: true
|
||||
become: true
|
||||
changed_when: false
|
||||
listen: "notification_reload_resolved"
|
||||
listen: "notification_restart_resolved"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
- name: Restart systemd-timesyncd
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
- name: Deploy root_ca.crt
|
||||
ansible.builtin.copy:
|
||||
content: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
dest: "/usr/local/share/ca-certificates/ilnmors_root_ca.crt"
|
||||
dest: "/usr/local/share/ca-certificates/{{ root_cert_filename }}"
|
||||
owner: "root"
|
||||
group: "root"
|
||||
mode: "0644"
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
group: "systemd-resolve"
|
||||
mode: "0640"
|
||||
become: true
|
||||
notify: "notification_reload_resolved"
|
||||
notify: "notification_restart_resolved"
|
||||
|
||||
- name: Restart systemd-resolved.service when it is initiated
|
||||
ansible.builtin.systemd:
|
||||
|
||||
@@ -54,7 +54,7 @@
|
||||
- name: Deploy root crt for build
|
||||
ansible.builtin.copy:
|
||||
content: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
dest: "{{ node['home_path'] }}/containers/caddy/build/ilnmors_root_ca.crt"
|
||||
dest: "{{ node['home_path'] }}/containers/caddy/build/{{ root_cert_filename }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0640"
|
||||
@@ -62,7 +62,7 @@
|
||||
|
||||
- name: Build caddy container image
|
||||
containers.podman.podman_image:
|
||||
name: "ilnmors.internal/{{ node['name'] }}/caddy"
|
||||
name: "{{ domain['internal'] }}/{{ node['name'] }}/caddy"
|
||||
# check tags from container file
|
||||
tag: "{{ version['containers']['caddy'] }}"
|
||||
state: "build"
|
||||
|
||||
@@ -37,9 +37,9 @@
|
||||
KOPIA_PASSWORD: "{{ hostvars['console']['kopia']['user']['console'] }}"
|
||||
ansible.builtin.shell: |
|
||||
/usr/bin/kopia repository connect server \
|
||||
--url=https://{{ infra_uri['kopia']['domain'] }}:{{ infra_uri['kopia']['ports']['https'] }} \
|
||||
--url=https://{{ services['kopia']['domain'] }}.{{ domain['internal'] }}:{{ services['kopia']['ports']['https'] }} \
|
||||
--override-username=console \
|
||||
--override-hostname=console.ilnmors.internal
|
||||
--override-hostname=console.{{ domain['internal'] }}
|
||||
changed_when: false
|
||||
failed_when: is_kopia_connected.rc != 0
|
||||
register: "is_kopia_connected"
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
state: "directory"
|
||||
mode: "0700"
|
||||
|
||||
- name: Create contaienr data directory for app
|
||||
- name: Create container data directory for app
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/data/containers"
|
||||
owner: "{{ ansible_user }}"
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
become: true
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
@cert-authority *.ilnmors.internal {{ hostvars['console']['ssh']['ca']['pub'] }}
|
||||
@cert-authority *.{{ domain['internal'] }} {{ hostvars['console']['ssh']['ca']['pub'] }}
|
||||
dest: "/etc/ssh/ssh_known_hosts"
|
||||
owner: "root"
|
||||
group: "root"
|
||||
|
||||
@@ -68,7 +68,7 @@
|
||||
dest: "{{ node['data_path'] }}/bin/blocky-{{ version['packages']['blocky'] }}-{{ item }}.tar.gz"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
mode: "0600" # noqa: line-length
|
||||
mode: "0600"
|
||||
loop:
|
||||
- "x86_64"
|
||||
- "arm64"
|
||||
|
||||
@@ -21,8 +21,8 @@
|
||||
become: true
|
||||
|
||||
- name: Deploy ddns service files
|
||||
ansible.builtin.copy:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/systemd/fw/ddns/{{ item }}"
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/systemd/fw/ddns/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/.config/systemd/user/{{ item }}"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
- name: Reload postgresql
|
||||
ansible.builtin.command:
|
||||
/usr/bin/podman exec -u postgres postgresql sh -c "pg_ctl reload"
|
||||
when: not (is_postgresql_init_run | default(false))
|
||||
when: is_postgresql_init.stat.exists
|
||||
changed_when: false
|
||||
listen: "notification_reload_postgresql"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
@@ -24,7 +24,7 @@
|
||||
enabled: true
|
||||
daemon_reload: true
|
||||
scope: "user"
|
||||
when: not (is_postgresql_init_run | default(false))
|
||||
when: is_postgresql_init.stat.exists
|
||||
changed_when: false
|
||||
listen: "notification_restart_postgresql"
|
||||
ignore_errors: true # noqa: ignore-errors
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
---
|
||||
- name: Set ca container subuid
|
||||
ansible.builtin.set_fact:
|
||||
ca_subuid: "100999"
|
||||
|
||||
- name: Create ca directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
owner: "{{ ca_subuid }}"
|
||||
owner: "{{ services['ca']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
state: "directory"
|
||||
mode: "0770"
|
||||
@@ -32,7 +28,7 @@
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/ca/config/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/ca/config/{{ item }}"
|
||||
owner: "{{ ca_subuid }}"
|
||||
owner: "{{ services['ca']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0400"
|
||||
loop:
|
||||
@@ -46,19 +42,19 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ item.path }}/{{ item.name }}"
|
||||
owner: "{{ ca_subuid }}"
|
||||
owner: "{{ services['ca']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- name: "ilnmors_root_ca.crt"
|
||||
- name: "{{ root_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
path: "{{ node['home_path'] }}/containers/ca/certs"
|
||||
mode: "0440"
|
||||
- name: "ilnmors_intermediate_ca.crt"
|
||||
- name: "{{ intermediate_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['intermediate']['crt'] }}"
|
||||
path: "{{ node['home_path'] }}/containers/ca/certs"
|
||||
mode: "0440"
|
||||
- name: "ilnmors_intermediate_ca.key"
|
||||
- name: "{{ intermediate_key_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['intermediate']['key'] }}"
|
||||
path: "{{ node['home_path'] }}/containers/ca/secrets"
|
||||
mode: "0400"
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
---
|
||||
- name: Set grafana container subuid
|
||||
ansible.builtin.set_fact:
|
||||
grafana_subuid: "100471"
|
||||
|
||||
- name: Create grafana directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
owner: "{{ grafana_subuid }}"
|
||||
owner: "{{ services['grafana']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
state: "directory"
|
||||
mode: "0770"
|
||||
@@ -23,8 +19,8 @@
|
||||
ansible.builtin.copy:
|
||||
content: |
|
||||
{{ hostvars['console']['ca']['root']['crt'] }}
|
||||
dest: "{{ node['home_path'] }}/containers/grafana/ssl/ilnmors_root_ca.crt"
|
||||
owner: "{{ grafana_subuid }}"
|
||||
dest: "{{ node['home_path'] }}/containers/grafana/ssl/{{ root_cert_filename }}"
|
||||
owner: "{{ services['grafana']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0400"
|
||||
become: true
|
||||
@@ -51,7 +47,7 @@
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/grafana/etc/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/grafana/etc/{{ item }}"
|
||||
owner: "{{ grafana_subuid }}"
|
||||
owner: "{{ services['grafana']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0400"
|
||||
loop:
|
||||
@@ -61,11 +57,11 @@
|
||||
notify: "notification_restart_grafana"
|
||||
no_log: true
|
||||
|
||||
- name: Deploy provisioing and dashboard files
|
||||
ansible.builtin.copy:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/grafana/etc/provisioning/"
|
||||
dest: "{{ node['home_path'] }}/containers/grafana/etc/provisioning/"
|
||||
owner: "{{ grafana_subuid }}"
|
||||
- name: Deploy provisioing file
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/grafana/etc/provisioning/datasources/datasources.yaml.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/grafana/etc/provisioning/datasources/datasources.yaml"
|
||||
owner: "{{ services['grafana']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0400"
|
||||
become: true
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
---
|
||||
- name: Set ldap container subuid
|
||||
ansible.builtin.set_fact:
|
||||
ldap_subuid: "100999"
|
||||
|
||||
- name: Create ldap directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
owner: "{{ ldap_subuid }}"
|
||||
owner: "{{ services['ldap']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
state: "directory"
|
||||
mode: "0770"
|
||||
@@ -21,11 +17,11 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ node['home_path'] }}/containers/ldap/ssl/{{ item.name }}"
|
||||
owner: "{{ ldap_subuid }}"
|
||||
owner: "{{ services['ldap']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- name: "ilnmors_root_ca.crt"
|
||||
- name: "{{ root_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
mode: "0440"
|
||||
- name: "ldap.crt"
|
||||
@@ -50,7 +46,7 @@
|
||||
# urlencode doesn't fix `/` as `%2F`. It needs replace
|
||||
- name: "LLDAP_DATABASE_URL"
|
||||
value: "postgres://ldap:{{ hostvars['console']['postgresql']['password']['ldap'] | urlencode | replace('/', '%2F') }}\
|
||||
@{{ infra_uri['postgresql']['domain'] }}/ldap_db?sslmode=verify-full&sslrootcert=/etc/ssl/ldap/ilnmors_root_ca.crt"
|
||||
@{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}/ldap_db?sslmode=verify-full&sslrootcert=/etc/ssl/ldap/{{ root_cert_filename }}"
|
||||
- name: "LLDAP_KEY_SEED"
|
||||
value: "{{ hostvars['console']['ldap']['seed_key'] }}"
|
||||
- name: "LLDAP_JWT_SECRET"
|
||||
@@ -59,6 +55,8 @@
|
||||
no_log: true
|
||||
|
||||
- name: Initiate ldap (When = false, If DB data does not exist in postgresql, activate this block)
|
||||
# The reason why this task doesn't use the way to check ".init" file is this tasks can override original database.
|
||||
# Absent of ".init" file cannot guarantee DB is empty.
|
||||
when: false
|
||||
become: true
|
||||
block:
|
||||
@@ -78,7 +76,7 @@
|
||||
detach: false
|
||||
env:
|
||||
TZ: "Asia/Seoul"
|
||||
LLDAP_LDAP_BASE_DN: "dc=ilnmors,dc=internal"
|
||||
LLDAP_LDAP_BASE_DN: "{{ domain['dc'] }}"
|
||||
secrets:
|
||||
- "LLDAP_DATABASE_URL,type=env"
|
||||
- "LLDAP_KEY_SEED,type=env"
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
---
|
||||
- name: Set loki container subuid
|
||||
ansible.builtin.set_fact:
|
||||
loki_subuid: "110000" # 10001
|
||||
|
||||
- name: Create loki directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ loki_subuid }}"
|
||||
owner: "{{ services['loki']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
@@ -18,10 +14,10 @@
|
||||
become: true
|
||||
|
||||
- name: Deploy loki configuration file
|
||||
ansible.builtin.copy:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/loki/etc/loki.yaml"
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/loki/etc/loki.yaml.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/loki/etc/loki.yaml"
|
||||
owner: "{{ loki_subuid }}"
|
||||
owner: "{{ services['loki']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0600"
|
||||
become: true
|
||||
@@ -33,11 +29,11 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ node['home_path'] }}/containers/loki/ssl/{{ item.name }}"
|
||||
owner: "{{ loki_subuid }}"
|
||||
owner: "{{ services['loki']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- name: "ilnmors_root_ca.crt"
|
||||
- name: "{{ root_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
mode: "0440"
|
||||
- name: "loki.crt"
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
---
|
||||
- name: Set postgresql container subuid
|
||||
ansible.builtin.set_fact:
|
||||
postgresql_subuid: "100998"
|
||||
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
# telegraf has no database
|
||||
connected_services:
|
||||
- "ldap"
|
||||
- "authelia"
|
||||
- "grafana"
|
||||
- "vaultwarden"
|
||||
- "gitea"
|
||||
- "immich"
|
||||
- "paperless"
|
||||
- "vikunja"
|
||||
- "affine"
|
||||
|
||||
- name: Create postgresql directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ postgresql_subuid }}"
|
||||
owner: "{{ services['postgresql']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
@@ -38,7 +39,7 @@
|
||||
|
||||
- name: Build postgresql container image
|
||||
containers.podman.podman_image:
|
||||
name: "ilnmors.internal/{{ node['name'] }}/postgres"
|
||||
name: "{{ domain['internal'] }}/{{ node['name'] }}/postgres"
|
||||
# check tags from container file
|
||||
tag: "pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}"
|
||||
state: "build"
|
||||
@@ -52,7 +53,7 @@
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/postgresql/config/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/postgresql/config/{{ item }}"
|
||||
owner: "{{ postgresql_subuid }}"
|
||||
owner: "{{ services['postgresql']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0600"
|
||||
loop:
|
||||
@@ -67,11 +68,11 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ node['home_path'] }}/containers/postgresql/ssl/{{ item.name }}"
|
||||
owner: "{{ postgresql_subuid }}"
|
||||
owner: "{{ services['postgresql']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- name: "ilnmors_root_ca.crt"
|
||||
- name: "{{ root_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
mode: "0440"
|
||||
- name: "postgresql.crt"
|
||||
@@ -87,15 +88,13 @@
|
||||
no_log: true
|
||||
|
||||
- name: Check data directory empty
|
||||
ansible.builtin.find:
|
||||
paths: "{{ node['home_path'] }}/containers/postgresql/data/"
|
||||
hidden: true
|
||||
file_type: "any"
|
||||
ansible.builtin.stat:
|
||||
path: "{{ node['home_path'] }}/containers/postgresql/data/.init"
|
||||
become: true
|
||||
register: "is_data_dir_empty"
|
||||
register: "is_postgresql_init"
|
||||
|
||||
- name: Prepare initiating DB
|
||||
when: is_data_dir_empty.matched == 0
|
||||
when: not is_postgresql_init.stat.exists
|
||||
become: true
|
||||
block:
|
||||
# `init/pg_cluster.sql` should be fetched from postgresql's backup directory before running initiating
|
||||
@@ -103,23 +102,28 @@
|
||||
ansible.builtin.copy:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/postgresql/init/pg_cluster.sql"
|
||||
dest: "{{ node['home_path'] }}/containers/postgresql/init/0_pg_cluster.sql"
|
||||
owner: "{{ postgresql_subuid }}"
|
||||
owner: "{{ services['postgresql']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0600"
|
||||
|
||||
- name: Deploy resoring data sql files
|
||||
- name: Deploy restoring data sql files
|
||||
ansible.builtin.copy:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/postgresql/init/pg_{{ item }}.sql"
|
||||
dest: "{{ node['home_path'] }}/containers/postgresql/init/{{ index_num + 1 }}_pg_{{ item }}.sql"
|
||||
owner: "{{ postgresql_subuid }}"
|
||||
owner: "{{ services['postgresql']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0600"
|
||||
loop: "{{ connected_services }}"
|
||||
loop_control:
|
||||
index_var: index_num
|
||||
- name: Set is_postgresql_init_run
|
||||
ansible.builtin.set_fact:
|
||||
is_postgresql_init_run: true
|
||||
|
||||
- name: Create .init file
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/postgresql/data/.init"
|
||||
state: "touch"
|
||||
mode: "0644"
|
||||
owner: "{{ ansible_user }}"
|
||||
group: "svadmins"
|
||||
|
||||
- name: Deploy container file
|
||||
ansible.builtin.template:
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
---
|
||||
- name: Set prometheus container subuid
|
||||
ansible.builtin.set_fact:
|
||||
prometheus_subuid: "165533" # nobody - 65534
|
||||
|
||||
- name: Create prometheus directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ prometheus_subuid }}"
|
||||
owner: "{{ services['prometheus']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
@@ -21,7 +17,7 @@
|
||||
ansible.builtin.template:
|
||||
src: "{{ hostvars['console']['node']['config_path'] }}/services/containers/infra/prometheus/etc/{{ item }}.j2"
|
||||
dest: "{{ node['home_path'] }}/containers/prometheus/etc/{{ item }}"
|
||||
owner: "{{ prometheus_subuid }}"
|
||||
owner: "{{ services['prometheus']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0600"
|
||||
loop:
|
||||
@@ -37,11 +33,11 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ node['home_path'] }}/containers/prometheus/ssl/{{ item.name }}"
|
||||
owner: "{{ prometheus_subuid }}"
|
||||
owner: "{{ services['prometheus']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "{{ item.mode }}"
|
||||
loop:
|
||||
- name: "ilnmors_root_ca.crt"
|
||||
- name: "{{ root_cert_filename }}"
|
||||
value: "{{ hostvars['console']['ca']['root']['crt'] }}"
|
||||
mode: "0440"
|
||||
- name: "prometheus.crt"
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
---
|
||||
- name: Set x509-exporter container subuid
|
||||
ansible.builtin.set_fact:
|
||||
x509_exporter_subuid: "165533" # nobody - 65534
|
||||
|
||||
- name: Create x509-exporter directory
|
||||
ansible.builtin.file:
|
||||
path: "{{ node['home_path'] }}/containers/{{ item }}"
|
||||
state: "directory"
|
||||
owner: "{{ x509_exporter_subuid }}"
|
||||
owner: "{{ services['x509-exporter']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0770"
|
||||
loop:
|
||||
@@ -20,7 +16,7 @@
|
||||
content: |
|
||||
{{ item.value }}
|
||||
dest: "{{ node['home_path'] }}/containers/x509-exporter/certs/{{ item.name }}"
|
||||
owner: "{{ x509_exporter_subuid }}"
|
||||
owner: "{{ services['x509-exporter']['subuid'] }}"
|
||||
group: "svadmins"
|
||||
mode: "0440"
|
||||
loop:
|
||||
|
||||
@@ -5,17 +5,27 @@ define NET4_SERVER = {{ hostvars['fw']['network4']['subnet']['server'] }}
|
||||
define NET6_SERVER = {{ hostvars['fw']['network6']['subnet']['server'] }}
|
||||
define HOSTS4_CONSOLE = { {{ hostvars['fw']['network4']['console'].values() | join(', ') }} }
|
||||
define HOSTS6_CONSOLE = { {{ hostvars['fw']['network6']['console'].values() | join(', ') }} }
|
||||
define HOSTS4_AUTH = {{ hostvars['fw']['network4']['auth']['server'] }}
|
||||
define HOSTS6_AUTH = {{ hostvars['fw']['network6']['auth']['server'] }}
|
||||
define PORTS_SSH = 22
|
||||
define PORTS_HTTP = 80
|
||||
define PORTS_HTTP_FORWARD = 2080
|
||||
define PORTS_HTTPS = 443
|
||||
define PORTS_HTTPS_FORWARD = 2443
|
||||
|
||||
table inet nat {
|
||||
chain prerouting {
|
||||
type nat hook prerouting priority dstnat; policy accept;
|
||||
tcp dport $PORTS_HTTP dnat to :$PORTS_HTTP_FORWARD comment "dnat http ports to $PORTS_HTTP_FORWARD"
|
||||
tcp dport $PORTS_HTTPS dnat to :$PORTS_HTTPS_FORWARD comment "dnat https ports to $PORTS_HTTPS_FORWARD"
|
||||
}
|
||||
chain postrouting {
|
||||
|
||||
}
|
||||
chain output {
|
||||
type nat hook output priority dstnat; policy accept;
|
||||
oifname "lo" tcp dport $PORTS_HTTP dnat to :$PORTS_HTTP_FORWARD comment "dnat http ports to $PORTS_HTTP_FORWARD out of LOCALHOST"
|
||||
oifname "lo" tcp dport $PORTS_HTTPS dnat to :$PORTS_HTTPS_FORWARD comment "dnat https ports to $PORTS_HTTPS_FORWARD out of LOCALHOST"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +38,10 @@ table inet filter {
|
||||
meta l4proto { icmp, icmpv6 } accept comment "allow icmp connection"
|
||||
ip saddr $HOSTS4_CONSOLE tcp dport $PORTS_SSH accept comment "allow ipv4 ssh connection: CONSOLE > APP"
|
||||
ip6 saddr $HOSTS6_CONSOLE tcp dport $PORTS_SSH accept comment "allow ipv6 ssh connection: CONSOLE > APP"
|
||||
ip saddr { $HOSTS4_CONSOLE, $HOSTS4_AUTH } tcp dport $PORTS_HTTP_FORWARD ct original proto-dst $PORTS_HTTP accept comment "allow ipv4 http connection: CONSOLE, AUTH > APP"
|
||||
ip6 saddr { $HOSTS6_CONSOLE, $HOSTS6_AUTH } tcp dport $PORTS_HTTP_FORWARD ct original proto-dst $PORTS_HTTP accept comment "allow ipv6 http connection: CONSOLE, AUTH > APP"
|
||||
ip saddr { $HOSTS4_CONSOLE, $HOSTS4_AUTH } tcp dport $PORTS_HTTPS_FORWARD ct original proto-dst $PORTS_HTTPS accept comment "allow ipv4 https connection: CONSOLE, AUTH > APP"
|
||||
ip6 saddr { $HOSTS6_CONSOLE, $HOSTS6_AUTH } tcp dport $PORTS_HTTPS_FORWARD ct original proto-dst $PORTS_HTTPS accept comment "allow ipv6 https connection: CONSOLE, AUTH > APP"
|
||||
}
|
||||
chain forward {
|
||||
type filter hook forward priority 0; policy drop;
|
||||
|
||||
@@ -3,32 +3,32 @@
|
||||
::1 {{ node['local_san'] }}
|
||||
{% if node['name'] == 'console' %}
|
||||
# Hosts IPv4
|
||||
{{ hostvars['fw']['network4']['firewall']['server'] }} fw.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['vmm']['client'] }} init.vmm.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['vmm']['server'] }} vmm.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['infra']['server'] }} infra.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['auth']['server'] }} auth.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['app']['server'] }} app.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['firewall']['server'] }} fw.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['vmm']['client'] }} init.vmm.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['vmm']['server'] }} vmm.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['infra']['server'] }} infra.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['auth']['server'] }} auth.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['app']['server'] }} app.{{ domain['internal'] }}
|
||||
# Hosts IPv6
|
||||
{{ hostvars['fw']['network6']['firewall']['server'] }} fw.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['vmm']['client'] }} init.vmm.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['vmm']['server'] }} vmm.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['infra']['server'] }} infra.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['auth']['server'] }} auth.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['app']['server'] }} app.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['firewall']['server'] }} fw.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['vmm']['client'] }} init.vmm.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['vmm']['server'] }} vmm.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['infra']['server'] }} infra.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['auth']['server'] }} auth.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['app']['server'] }} app.{{ domain['internal'] }}
|
||||
{% else %}
|
||||
# IPv4
|
||||
# Crowdsec, blocky, bind(fw)
|
||||
{{ hostvars['fw']['network4']['firewall']['server'] }} ntp.ilnmors.internal crowdsec.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['blocky']['server'] }} blocky.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['bind']['server'] }} bind.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['firewall']['server'] }} ntp.{{ domain['internal'] }} crowdsec.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['blocky']['server'] }} blocky.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network4']['bind']['server'] }} bind.{{ domain['internal'] }}
|
||||
# DB, LDAP, CA, Prometheus, Loki, mail (infra)
|
||||
{{ hostvars['fw']['network4']['infra']['server'] }} postgresql.ilnmors.internal ldap.ilnmors.internal prometheus.ilnmors.internal loki.ilnmors.internal mail.ilnmors.internal ca.ilnmors.internal
|
||||
{{ hostvars['fw']['network4']['infra']['server'] }} postgresql.{{ domain['internal'] }} ldap.{{ domain['internal'] }} prometheus.{{ domain['internal'] }} loki.{{ domain['internal'] }} mail.{{ domain['internal'] }} ca.{{ domain['internal'] }}
|
||||
# IPv6
|
||||
# Crowdsec, blocky, bind(fw)
|
||||
{{ hostvars['fw']['network6']['firewall']['server'] }} ntp.ilnmors.internal crowdsec.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['blocky']['server'] }} blocky.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['bind']['server'] }} bind.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['firewall']['server'] }} ntp.{{ domain['internal'] }} crowdsec.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['blocky']['server'] }} blocky.{{ domain['internal'] }}
|
||||
{{ hostvars['fw']['network6']['bind']['server'] }} bind.{{ domain['internal'] }}
|
||||
# DB, LDAP, CA, Prometheus, Loki, mail (infra)
|
||||
{{ hostvars['fw']['network6']['infra']['server'] }} postgresql.ilnmors.internal ldap.ilnmors.internal prometheus.ilnmors.internal loki.ilnmors.internal mail.ilnmors.internal ca.ilnmors.internal
|
||||
{{ hostvars['fw']['network6']['infra']['server'] }} postgresql.{{ domain['internal'] }} ldap.{{ domain['internal'] }} prometheus.{{ domain['internal'] }} loki.{{ domain['internal'] }} mail.{{ domain['internal'] }} ca.{{ domain['internal'] }}
|
||||
{% endif %}
|
||||
|
||||
@@ -3,4 +3,4 @@
|
||||
DNS=1.1.1.2 1.0.0.2
|
||||
DNS=2606:4700:4700::1112 2606:4700:4700::1002
|
||||
{% endif %}
|
||||
cache=false
|
||||
cache=false
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
[Time]
|
||||
NTP=ntp.ilnmors.internal
|
||||
NTP=ntp.{{ domain['internal'] }}
|
||||
FallbackNTP=0.debian.pool.ntp.org 1.debian.pool.ntp.org 2.debian.pool.ntp.org 3.debian.pool.ntp.org
|
||||
|
||||
@@ -30,6 +30,7 @@ define HOSTS4_INFRA = {{ hostvars['fw']['network4']['infra']['server'] }}
|
||||
define HOSTS4_AUTH = {{ hostvars['fw']['network4']['auth']['server'] }}
|
||||
define HOSTS4_APP = {{ hostvars['fw']['network4']['app']['server'] }}
|
||||
define HOSTS4_NAS = {{ hostvars['fw']['network4']['nas']['client'] }}
|
||||
define HOSTS4_PRINTER = {{ hostvars['fw']['network4']['printer']['client'] }}
|
||||
|
||||
define HOSTS6_FW = { {{ hostvars['fw']['network6']['firewall'].values() | join(', ') }} }
|
||||
define HOSTS6_BLOCKY = {{ hostvars['fw']['network6']['blocky']['server'] }}
|
||||
@@ -146,6 +147,8 @@ table inet filter {
|
||||
# Kopia/NAS Console > NAS
|
||||
oifname $IF_CLIENT ip saddr $HOSTS4_CONSOLE ip daddr $HOSTS4_NAS tcp dport { $PORTS_NAS, $PORTS_KOPIA } accept comment "allow ipv4 web connection (DSM, KOPIA): CONSOLE > FW > CLIENT NAS"
|
||||
oifname $IF_CLIENT ip6 saddr $HOSTS6_CONSOLE ip6 daddr $HOSTS6_NAS tcp dport { $PORTS_NAS, $PORTS_KOPIA } accept comment "allow ipv6 web connection (DSM, KOPIA): CONSOLE > FW > CLIENT NAS"
|
||||
# Printer
|
||||
oifname $IF_CLIENT ip saddr $HOSTS4_CONSOLE ip daddr $HOSTS4_PRINTER accept comment "allow ipv4 printer connection: CONSOLE > FW > PRINTER"
|
||||
|
||||
iifname $IF_WAN jump wan comment "set WAN interface rules"
|
||||
iifname $IF_CLIENT jump client comment "set CLIENT interface rules"
|
||||
|
||||
@@ -113,6 +113,12 @@ postgresql:
|
||||
ldap: ENC[AES256_GCM,data:mJrxIhXynHxJhncw3upHpOkXIw+Ka9bmDBJwkDjYl+D9Pg4RDvL6WzBjthw=,iv:y8MUYo6VhgTzbWh/+n7/hf1Jw+L2KcdxKvulPJ67xn8=,tag:4ZFpj1UdOwXmaZjYvC/s3A==,type:str]
|
||||
grafana: ENC[AES256_GCM,data:P9okJ7bcsqmeGstkSwbDq/RgnG+lFrgAOvcj8A5lOTpmHaSlXGiKG+ybXa0=,iv:Di1ghnxIbAb/u7uo/mJCC3QYVjdweTHaQDZmXTx8OG4=,tag:DT3a1zgU9sTr0BXpyoZ/SQ==,type:str]
|
||||
authelia: ENC[AES256_GCM,data:OqyloAADO6KKEaBjGLsJc9GTe77wn6IvA1VCD2dfCWxx+zgzUYh87fK1XX8=,iv:QIOHNTdNnzcY/f3Co8dPdNHykhBnYRhm43nt35hbALM=,tag:DLQq58GrZd+Ul7MSn6s9uQ==,type:str]
|
||||
vaultwarden: ENC[AES256_GCM,data:BPj5eFo54DTZ82n3yTIqEbm7kb/jWT0n2kZY//oV5q48eRch3C2RBuxn/Ko=,iv:DGC4ipHMyVs25gc4sNMt8LN1RsHjiR/b303vgiFoxMY=,tag:k1eb4DoRPLKvvMstSI1faQ==,type:str]
|
||||
gitea: ENC[AES256_GCM,data:l+pBCzyQa3000SE9z1R4htD0V0ONsBtKy92dfgsVYsZ3XlEyVJDIBOsugwM=,iv:5t/oHW1vFAmV/s2Ze/cV9Vuqo96Qu6QvZeRbio7VX2s=,tag:4zeQaXiXIzBpy+tXsxmN7Q==,type:str]
|
||||
immich: ENC[AES256_GCM,data:11jvxTKA/RL0DGL6y2/X092hnDohj6yTrYGK4IVojqBd1gCOBnDvUjgmx14=,iv:oBfHxsx9nxhyKY/WOuWfybxEX2bf+lHEtsaifFRS9lg=,tag:tAfkBdgQ8ZEkLIFcDICKDw==,type:str]
|
||||
paperless: ENC[AES256_GCM,data:6VBrBbjVoam7SkZCSvoBTdrfkUoDghdGTiBmFLul04X/okXOHeC5zusJffY=,iv:iZumcJ3TWwZD77FzYx8THwCqC+EbnXUBrEKuPh3zgV8=,tag:u2m8SppAdxZ/duNdpuS3oQ==,type:str]
|
||||
vikunja: ENC[AES256_GCM,data:/+wQdoFPTBG2elI9kZbAVWrHZ0DhMaYr4dc+2z9QNdb3TcDS2PEia0JuSAg=,iv:MViZTyUD8YqMmxSTWCQpJ30f/KQdQGOzPlRHHsQ8lAw=,tag:zov3POno139dkMxFDpj2gg==,type:str]
|
||||
affine: ENC[AES256_GCM,data:XPXrcszsV06YqCJZ7CDqc4rCwqqNlbtLCFYfLAQ8jamLtft8L2UVrMA4WZo=,iv:vrWdBeckxB9tmEE628j4jhU+hSpE6TXYMGt0hh1Cg84=,tag:hlWwWUGht8NqWTZREMsa1Q==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
@@ -131,7 +137,7 @@ ldap:
|
||||
authelia: ENC[AES256_GCM,data:G8ZGsLKqEmMzQ5NMAgirF5BQraHNqixtI6dyyaeNhTdXebjJZML52xL36p4=,iv:ZtHAsFYmrQxr+qoQLPW/eme0+nsT148KRsXmW/LNLlU=,tag:Pvjs/eylkgxJpmGBsRmjcw==,type:str]
|
||||
grafana: ENC[AES256_GCM,data:vWmU3ZKcolETWAY74C3OMD8gMXDeYk+DqssACL0xefIPi5IkbrhYWmnWAnA=,iv:wcRms3Zp8kPM4USRPVa0UHpCTK36SWhK9C8yHSWu2Cs=,tag:gU5S/6fdMZVd/ih3Yd5uJA==,type:str]
|
||||
il: ENC[AES256_GCM,data:/CyMeo1+rIUAYiB25nI0,iv:jsyiiRN5z9GqcUnTZ0CZo4s+umTc2zeY2FPp+tVOC9o=,tag:cwOHcqMysCxX57w3a+Pzpg==,type:str]
|
||||
morsalin: null
|
||||
morsalin: ENC[AES256_GCM,data:YryNch8hF6rx,iv:bNIBur3Jcib8BvKjJ0MejpemsurYTP8rCxo6b2R5yEo=,tag:9dIIgqEPtbeixtgJ1OtMnQ==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
@@ -170,6 +176,85 @@ authelia:
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:MiobmVqj3dPargzws5q7Cpggnvw=,iv:VzBh1ZWqyByyTDn90pl0//TsjAYO5QLpfbrxnMF5OU4=,tag:7Y16L9i/homdtEpWynzsdg==,type:comment]
|
||||
vaultwarden:
|
||||
admin:
|
||||
password: ENC[AES256_GCM,data:FzD80H5lPNyjTRGd/IcIGir35KLF8gN3qmTzEQrlfutHh08mk0Vh9X8irHA=,iv:cfn5432g2MvkuBJLB2zDJmU0sLgMefVvpy0bP/4/oPY=,tag:7MHEv8XswdQZNTARvxs55g==,type:str]
|
||||
hash: ENC[AES256_GCM,data:Ae7bhnL5pxfNV3+r/PCDIpxx0tHcNiB/s4sm7OoxVpp+tcEpXZ9vsv38sZjx+Z8t2vlmiYHc72PEdapcAihleuLMBPM1dAVn8LqzzWtVhfmCOoUZI8Z5L3VL8eyyz6npiw==,iv:c2MEIbrnVsR+bUx3zLLTnKSElFTH7JRl23HmmTlWEBg=,tag:mnMrER9pQEKAdloYMeRa3g==,type:str]
|
||||
il:
|
||||
password: ENC[AES256_GCM,data:mhLb55ENatpE59Rbzk2Uq6iBKc0Jj/9x4fOANJOHR0WvSxTQRKJ94cCq9Ykp/chWbdgYYPrwU5oD0Yo17zqb,iv:6bzKq5WaKhuOsQ8zSSH9ZrQYbDPB1nv/bFoQou1ycL4=,tag:bSmuUodl+/9nzxZ7YBoezA==,type:str]
|
||||
morsalin:
|
||||
password: ENC[AES256_GCM,data:ibg5/MfLH7pSY2pEmjM=,iv:+aV5muP/9BYoKwTGQxKEL+IGY9P+O3GVKGgSuTzT+U0=,tag:rgqcmJvd1RtvWJ91PCxYIg==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:HAN9BBEl1CW11LAf3x8=,iv:hwqCErtmPqGJ+y86D9MoxJwixvbcJONlyT900Y5DOug=,tag:kCxsG353ltbOCKLe85adCg==,type:comment]
|
||||
gitea:
|
||||
il:
|
||||
password: ENC[AES256_GCM,data:Bs5/t5mNbSv5+ek9gNHZp5qqxitM4Kq0Xgh2JV6LiFw9lZJSOVT4JPLRP5M=,iv:z50+naWOTVL8lEgBgm51j6hLjS8ve2UcRRKukvtykM0=,tag:TF9rPxjoLe/vAyvl23PiCg==,type:str]
|
||||
token: ENC[AES256_GCM,data:CiYEXEKLLRDp++iga5YmhyPB2bSvqhhDgSOhmiulp9n9SsBVQ3s5MQ==,iv:+oetgEH/IORz74Xoz5zgDjD3BLyledZTqlYlCWaDrRk=,tag:S9O09YEMuSexuNW3ojEw5Q==,type:str]
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:qjQosi83oaK47wX4VtDktDFlIU3FQgmDpwyiSgJNPDq2Dtc/QEKzdv5/ZpQ=,iv:aUJx5a6Wj7lhaD19aPiZWIE/MWKUgH0muxtTSkwfg8M=,tag:DttjXee7ntxRAbqbFq6qeQ==,type:str]
|
||||
hash: ENC[AES256_GCM,data:iVls8UzdP6JTfRosET6nsk3RcEtFQ8ak6GtPuBjcqAv3lyA9oSLwvvC8MvGaT1aHCB+5y/lspuloHEaAcVGNuzGCIVLg9Adut4a7LYDTHNOZnJIKLj8ldL1ytD5XVdbeBhZGCqoTs088oO0HVCQiDzpJL5NdmM3Ru6egyYBCLLUOlPA=,iv:er63GEC+kxNDscvspvvLiq17VSg0GeZ2w3jmGojF9PM=,tag:F1pDpaQrVdomIlZ8psVpAw==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:HvMeGuC8JwK50pO1E/nm,iv:5NFTyjesMX0ZnBpH+hEv8jQ0G2NvrDtT23CUyLbQcUo=,tag:qWmPvQpADTeD+W8nWXRQvA==,type:comment]
|
||||
immich:
|
||||
il:
|
||||
password: ENC[AES256_GCM,data:PbDFc4m7rNPPN1mCjcvhbKwf/EbiJxdvO/iMspf9jMuCqQyGv7h6VrZqk98=,iv:hlMAp5wXXkFO9+ekq3A2/ioF/EX8Uau0puhb4TAHkRQ=,tag:pfS0W2JNaFNMpBlKgZ3Pjw==,type:str]
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:9ENcp2Ns21OLXDY05zRoSdP+93EiwSH8MGzZZpxK7sToe4QLUXWt9w6xQIE=,iv:Q/VcnArZHs/J2YLRVFXt3Mp+LYfuq4PD/trqO8Simig=,tag:BY/mOyZpYmoAc7NrASlmSA==,type:str]
|
||||
hash: ENC[AES256_GCM,data:mrML2CWFFtGjq8wfWipVpv+pjJRSHe74VGC7Eoa6588R5C/sCnC3W5aI+dsRCZN3LRCjHAkOJJgjeYrwcXYdKRauXsAYR51dNSsHqqSN3WebLxapRDwcYu5e4j5RN1aPHsysr7GaQ4hhe5rKW4ORCGC3Cp3Ob+LChPy4bdCAZG3bN9k=,iv:Q4hmqhq+dvIr7DxCpcqP4E0NKyFZkOeTnDpGctmCxXM=,tag:/gji6AFkHnYwkQf3FSQUxA==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:bzMt0Ox0Za4dOhoo7S6dYCdK32JI9Q==,iv:PRTryIJk0tR545XY0LoHwklvsJp5+A5bEljNmzUvRhY=,tag:EVsjRUGMOadaNbMu0Xr4XA==,type:comment]
|
||||
actualbudget:
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:TE2umZ9Vvr7cSfA2+TAfRadIWZN3hyOKQ6U9NqJFm5e9iiw1avI+QlnYcKI=,iv:rUWoclBRqh0tsGnMq29395Fn2NP7AXnSCd0s+S8jQ6I=,tag:qPX/TcdIo6BJeex7wmi02Q==,type:str]
|
||||
hash: ENC[AES256_GCM,data:UjhNkGj+sxbnmPUx1V5kVYwZnzsB0aEvN8YV29lcvMbSnf9xpQWwD5C93Zu8SYrnS/p88qZpGBgAjr9Pcly3y0H1YMRt9zzbHZU3Uo0DPDrSWRQdeB/8LkcM/cwMAs8arS6PO03ECNnN5Z6aTmFdFnLjUkvUuSWMFscItAzMzhWCpeY=,iv:B06LI7Cq3NN8haOLfN3gWIpUFnvdUlq6D2XmARojDpk=,tag:MflE8qcY5j/aAA7xfPCqng==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:McPUAbIUvtC1gdPaxTgAxAMCMWcLfg==,iv:Tp6idRf7he3sYzo8LW596C905JAaoTIhIoDUzSyRT0k=,tag:4mZQ0Swu1X9uuwjsRNhr2A==,type:comment]
|
||||
paperless:
|
||||
session_secret: ENC[AES256_GCM,data:siwCs2noeVpg9DCEZybnmo/oz11BdrHSTnHciMOu/6g=,iv:XVjhu10TIujIdUopN9+TVVqRade9EvItDWxym6YXnZs=,tag:TxLYm+4Bo7IMaTQBtMg9pQ==,type:str]
|
||||
il:
|
||||
password: ENC[AES256_GCM,data:9bJHf+chTg1rppgNVafNgEuvwQ69Gx+w5d65hu68q9XeeaVb2pO9HE4BOgg=,iv:1kaXBg/iOoIZxDjEVEdaMJLDtp6zQjep3vxLmIgQN5o=,tag:+MgX8Oa3tmhjx6u9aHkDfQ==,type:str]
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:wjRDVCJsINM4z5946a6uZD+6bhN5BChLMdRzgMEJFGRGFNcXd7A1p2Iqn4I=,iv:Y4QDA09L8ULKr4hhvoiduzCD8Hifo1gAnpzjCr8e520=,tag:R0RvGxYnXo3zwykXJykRug==,type:str]
|
||||
hash: ENC[AES256_GCM,data:pali6WwPNhJA+6QL4O+tKv42PnpGqmojb8JQUZLqxGizv1bJSCgdUN8upCy5Ke0DYZs5P+JY5vh23xfMZFnHduGxGwOuPX6J5lYgvJRV58LqS3/+yIBBprTJyro3MwsurTTEWesgKMr8/2H9lirhaLjWUOSPxAmQ6e4wPNpHycDVyj4=,iv:cg2trI7t1MfIcMo1/M+IY6JEl2msDoKRGgAx/Y5nyGk=,tag:gnOq7sBq9z5zrRY0yhIabg==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:V7DJHA2JQirfBsrCGhXrhg==,iv:+jYqX9hGNnuyYj9o9LpCYFVOoD6nSrtc4t40Ag0mMzo=,tag:1wSxKtkJm42reUxdwYDvlg==,type:comment]
|
||||
vikunja:
|
||||
session_secret: ENC[AES256_GCM,data:CMyw8JGHyTczGsrOJJwQBKfXMU4Sudvwkur1Lgx4o64=,iv:F2VmpqddiDT4jGaGDKGl6FARsQOt3lLz3X6TjC2MIVU=,tag:UJYyzrl/FX1BNwY4ROFncA==,type:str]
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:QwqndYsfr+fh9OLkHYtLYCa6WUdhnL7A4btz1d1eelTwq3Kps5S6BUN5qZg=,iv:51N8byIAAUh4ky7YBAuEJOBEWu1d9AX5W1m37/cLlCM=,tag:GD7jbxNGd748TCPgqsxyMg==,type:str]
|
||||
hash: ENC[AES256_GCM,data:ORifyT4u1V2CyBCNBgF72wwS2i05mlzA4iIVEa1cH9aaE69PdiQvGGzMHK+tmlfpVaVQEENSt1QDUSSlMyeuZT/3a0JwAvlz+XDbpS7bicL2cB6DCa4JyEd/rbGRXs0/COfxPxXzYv7jq9gd2uSJ+cCGYb/93WuEXSEI6PHi+FF7N94=,iv:FVSGySa4YB2vwenqSagBzxeIexg91ewvcQMix+etmng=,tag:yyQtOgzOZypba+rV3A1K9g==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:EsRGZP7snPchEAMoQN5PoQpiOA==,iv:A/8POGq3pIw7aX5S2vyKtI2vPqH0FT6yZnpe/vVbifw=,tag:BgUYHX2zxIL7yLS0JbI1Yg==,type:comment]
|
||||
opencloud:
|
||||
admin:
|
||||
password: ENC[AES256_GCM,data:VKG7sNTTLHCXRGf4SAlR91+hvc7PaNrnpJX/4kItVcT9W1Hdl/yKgHHD7M8=,iv:WwWnx9KuN+i/Ugwv+HY4IGDZrLHk71hsobGFOn9kml0=,tag:SS6ihrtZjLnlAJR59lw+gw==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:k55osvepVeB1RC5hZ4IF,iv:AlhfmWwn/DiSESWc+ULJSOLUhnrKAIfWr7MeiwV8qc8=,tag:hOgptwUcY6nVxPIhu+DYgw==,type:comment]
|
||||
affine:
|
||||
secret_key: ENC[AES256_GCM,data:LLX78DpYnha1JWhgw0sHLzIVq/oIzvT+nB7zgli4mroGbnt7WZaXCx34zKkYRwYj/+0L4IFFVdkzKtK5DO84SgFkS2Bk2iNdCMqIx80CpyiD8IWAcyRu5d6hh82PlgyxU80T/4nbLbIn0GLubPTTeUX8GC3VxRU=,iv:DnmvbhlygSHes0jAkIm4+WXMUQLzr4R4dNa33rO67v8=,tag:+2wlh+/ekiTyShWM4XBbUw==,type:str]
|
||||
il:
|
||||
password: ENC[AES256_GCM,data:4zxiQAzXTR+fraRjYT657BIwSqrih3lMPFFSibQdardRMjskAbuRYIQA6mo=,iv:ub3giRG9vCFSuwRXDazYTqWbjENzQUWR36290Kruj1o=,tag:C2Ixd2eTEgzBvUNCNBtJuA==,type:str]
|
||||
oidc:
|
||||
secret: ENC[AES256_GCM,data:eRDBrqLZR7MFLlsUwk7Wg7FzxDov7vJLIWQRuKq7vrXbPSJkMcy9jfG2rL4=,iv:UaSoi7gODXgjzihJIDVIdDHJcSAZNV8UKfGeM6YzxqI=,tag:cOUDblcMStP8E4fp+s1WRQ==,type:str]
|
||||
hash: ENC[AES256_GCM,data:jE1CvFo+mjb/Xc3Ft5ky7on03vcnv79cw/5g/xaldXsv94VRrIjmfGMgHAj07r8j5mDpP34A5bYO1PSe9DYrwRcsXa9OUQuzm/8avFy9wVZDhBUUAGR+jiW1BP9hc6nmSpPVPtle+3sbqOB0ZMjXWwlcAcuknOtuhH1mzwmaDP9yf+M=,iv:CSSaXY/6MpHBMhPLUWPkabIeJ9zpZkcVjiEhxVF0zJM=,tag:f72ekkjJs7Qmh1K9wC8L9w==,type:str]
|
||||
#ENC[AES256_GCM,data:ODXFUxxxdQ==,iv:s9zJVx6wo6x517tbNvC+FZ0dFzqbjqeLI6rXBq72hQA=,tag:bXoV2I3LbpmQyddJrtS3Qg==,type:comment]
|
||||
#
|
||||
#
|
||||
#ENC[AES256_GCM,data:T4Wtn49AAxPd2QUFTR+q,iv:bH5goGWBDqumAat9dUv2OwfCUJUpuVqncTMqMBZUXhI=,tag:G+W6hHA+yftQ+4RJpXrxHg==,type:comment]
|
||||
switch:
|
||||
password: ENC[AES256_GCM,data:qu0f9L7A0eFq/UCpaRs=,iv:W8LLOp3MSfd/+EfNEZNf91K8GgI5eUfVPoWTRES2C0Y=,tag:Q5FlAOfwqwJwPvd7k6i+0g==,type:str]
|
||||
@@ -199,7 +284,7 @@ sops:
|
||||
UmliaFNxVTBqRkI1QWJpWGpTRWxETW8KEY/8AfU73UOzCGhny1cNnd5dCNv7bHXt
|
||||
k+uyWPPi+enFkVaceSwMFrA66uaWWrwAj11sXEB7yzvGFPrnAGezjQ==
|
||||
-----END AGE ENCRYPTED FILE-----
|
||||
lastmodified: "2026-03-14T19:40:47Z"
|
||||
mac: ENC[AES256_GCM,data:EUVSxs6FPhKMSSmHe8P/d0IyBZsNb3q7AYj06j98bklAMYYVOludVePdh45MSvn92lDn712Muy6pqcJzDpsPWyxgXngywTu2SGV1yRCyA7U7RloRxlNROuDiugMkJWOtHcKArytVChUHT2PnzagAJR2kBSApbjUsC/xUTMBpsNM=,iv:SsJW2fMNEJHT2M+gjW5TKu6AYoxsf9jKf5T9KgJoF40=,tag:ItVweaSxts2Cm1VKkLp0/w==,type:str]
|
||||
lastmodified: "2026-04-06T14:32:22Z"
|
||||
mac: ENC[AES256_GCM,data:OFiSsBBAzOUoOwnAwhaplQQ8k2kUo+Avzk475BpaiOJoaB2c0wsJ3siP15tcLMrav4Qw8boZFo64v+rjdMoNI/MRo1EOYWNr1ZRMqHzwmQeaiMH2QcfoRZ0oLqrn5ekQztuPR9ULjDYZb63AwVGmzseUf4R5lGXgdgN5tjU/pH4=,iv:hqzDwryMuJ7JnkBazzDSznw05m7k61Sk61aPgO3JtpU=,tag:Lhhlgwy+YuQ1S0hkbsjecg==,type:str]
|
||||
unencrypted_suffix: _unencrypted
|
||||
version: 3.12.1
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Actual Budget
|
||||
|
||||
[Container]
|
||||
Image=ghcr.io/actualbudget/actual-server:{{ version['containers']['actualbudget'] }}
|
||||
ContainerName=actual-budget
|
||||
HostName=actual-budget
|
||||
|
||||
PublishPort={{ services['actualbudget']['ports']['http'] }}:5006
|
||||
|
||||
Volume=%h/data/containers/actual-budget:/data:rw
|
||||
|
||||
Environment="TZ=Asia/Seoul"
|
||||
Environment="ACTUAL_OPENID_DISCOVERY_URL=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}/.well-known/openid-configuration"
|
||||
Environment="ACTUAL_OPENID_CLIENT_ID=actual-budget"
|
||||
Environment="ACTUAL_OPENID_SERVER_HOSTNAME=https://{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="ACTUAL_OPENID_AUTH_METHOD=oauth2"
|
||||
Secret=ACTUAL_OPENID_CLIENT_SECRET,type=env
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
49
config/services/containers/app/affine/affine.container.j2
Normal file
49
config/services/containers/app/affine/affine.container.j2
Normal file
@@ -0,0 +1,49 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=AFFiNE
|
||||
|
||||
After=redis_affine.service manticore_affine.service
|
||||
Wants=redis_affine.service manticore_affine.service
|
||||
|
||||
[Container]
|
||||
Image=ghcr.io/toeverything/affine:{{ version['containers']['affine'] }}
|
||||
ContainerName=affine
|
||||
HostName=affine
|
||||
|
||||
PublishPort={{ services['affine']['ports']['http'] }}:3010
|
||||
|
||||
Volume=%h/data/containers/affine:/root/.affine/storage:rw
|
||||
Volume=%h/containers/affine/config:/root/.affine/config
|
||||
Volume=%h/containers/affine/ssl:/etc/ssl/affine:ro
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
## OIDC callback URIs
|
||||
Environment="AFFINE_SERVER_HOST={{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="AFFINE_SERVER_EXTERNAL_URL=https://{{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="AFFINE_SERVER_HTTPS=true"
|
||||
|
||||
Secret=AFFINE_PRIVATE_KEY,type=env
|
||||
|
||||
# Database
|
||||
Secret=AFFINE_DATABASE_URL,type=env,target=DATABASE_URL
|
||||
## Enable AI function: this needs pgvector
|
||||
|
||||
# Redis
|
||||
Environment="REDIS_SERVER_HOST=host.containers.internal"
|
||||
Environment="REDIS_SERVER_PORT={{ services['affine']['ports']['redis'] }}"
|
||||
|
||||
# Indexer
|
||||
Environment="AFFINE_INDEXER_ENABLED=true"
|
||||
Environment="AFFINE_INDEXER_SEARCH_ENDPOINT=http://host.containers.internal:{{ services['affine']['ports']['manticore'] }}"
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
51
config/services/containers/app/gitea/gitea.container.j2
Normal file
51
config/services/containers/app/gitea/gitea.container.j2
Normal file
@@ -0,0 +1,51 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Gitea
|
||||
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Container]
|
||||
Image=docker.io/gitea/gitea:{{ version['containers']['gitea'] }}
|
||||
|
||||
ContainerName=gitea
|
||||
HostName=gitea
|
||||
|
||||
PublishPort={{ services['gitea']['ports']['http'] }}:3000/tcp
|
||||
|
||||
Volume=%h/data/containers/gitea:/data:rw
|
||||
Volume=%h/containers/gitea/ssl:/etc/ssl/gitea:ro
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
Environment="GITEA__server__DISABLE_SSH=true"
|
||||
# Database
|
||||
Environment="GITEA__database__DB_TYPE=postgres"
|
||||
Environment="GITEA__database__HOST={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:{{ services['postgresql']['ports']['tcp'] }}"
|
||||
Environment="GITEA__database__NAME=gitea_db"
|
||||
Environment="GITEA__database__USER=gitea"
|
||||
Secret=GITEA__database__PASSWD,type=env
|
||||
Environment="GITEA__database__SSL_MODE=verify-full"
|
||||
Environment="PGSSLROOTCERT=/etc/ssl/gitea/{{ root_cert_filename }}"
|
||||
# OAuth2 client
|
||||
Environment="GITEA__oauth2_client__ACCOUNT_LINKING=auto"
|
||||
# OIDC configuration
|
||||
Environment="GITEA__openid__ENABLE_OPENID_SIGNIN=false"
|
||||
Environment="GITEA__openid__ENABLE_OPENID_SIGNUP=true"
|
||||
Environment="GITEA__openid__WHITELISTED_URIS={{ services['authelia']['domain'] }}.{{ domain['public'] }}"
|
||||
# automatic create user via authelia
|
||||
Environment="GITEA__service__DISABLE_REGISTRATION=false"
|
||||
Environment="GITEA__service__ALLOW_ONLY_EXTERNAL_REGISTRATION=true"
|
||||
Environment="GITEA__service__SHOW_REGISTRATION_BUTTON=false"
|
||||
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
32
config/services/containers/app/immich/immich-ml.container.j2
Normal file
32
config/services/containers/app/immich/immich-ml.container.j2
Normal file
@@ -0,0 +1,32 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Immich Machine Learning
|
||||
|
||||
After=immich.service
|
||||
Wants=immich.service
|
||||
|
||||
[Container]
|
||||
Image=ghcr.io/immich-app/immich-machine-learning:{{ version['containers']['immich'] }}-openvino
|
||||
|
||||
ContainerName=immich-ml
|
||||
HostName=immich-ml
|
||||
|
||||
PublishPort={{ services['immich-ml']['ports']['http'] }}:3003
|
||||
|
||||
# iGPU access for OpenVINO
|
||||
AddDevice=/dev/dri:/dev/dri
|
||||
PodmanArgs=--group-add keep-groups
|
||||
|
||||
Volume=%h/containers/immich/ml/cache:/cache:rw
|
||||
|
||||
Environment="TZ=Asia/Seoul"
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
53
config/services/containers/app/immich/immich.container.j2
Normal file
53
config/services/containers/app/immich/immich.container.j2
Normal file
@@ -0,0 +1,53 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Immich
|
||||
|
||||
After=redis_immich.service
|
||||
Wants=redis_immich.service
|
||||
|
||||
[Container]
|
||||
Image=ghcr.io/immich-app/immich-server:{{ version['containers']['immich'] }}
|
||||
|
||||
ContainerName=immich
|
||||
HostName=immich
|
||||
|
||||
PublishPort={{ services['immich']['ports']['http'] }}:2283
|
||||
|
||||
# iGPU access
|
||||
AddDevice=/dev/dri:/dev/dri
|
||||
PodmanArgs=--group-add keep-groups
|
||||
|
||||
# Volumes
|
||||
Volume=%h/data/containers/immich:/data:rw
|
||||
Volume=%h/containers/immich/ssl:/etc/ssl/immich:ro
|
||||
|
||||
# Environment
|
||||
Environment="TZ=Asia/Seoul"
|
||||
# The new environment from version 2.7.0 to enable CSP
|
||||
Environment="IMMICH_HELMET_FILE=true"
|
||||
|
||||
# Redis
|
||||
Environment="REDIS_HOSTNAME=host.containers.internal"
|
||||
Environment="REDIS_PORT={{ services['immich']['ports']['redis'] }}"
|
||||
Environment="REDIS_DBINDEX=0"
|
||||
|
||||
# Database
|
||||
Environment="DB_HOSTNAME={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}"
|
||||
Environment="DB_PORT={{ services['postgresql']['ports']['tcp'] }}"
|
||||
Environment="DB_USERNAME=immich"
|
||||
Environment="DB_DATABASE_NAME=immich_db"
|
||||
Environment="DB_PASSWORD_FILE=/run/secrets/DB_PASSWORD"
|
||||
Environment="DB_SSL_MODE=verify-full"
|
||||
Environment="NODE_EXTRA_CA_CERTS=/etc/ssl/immich/{{ root_cert_filename }}"
|
||||
Secret=IMMICH_DB_PASSWORD,target=/run/secrets/DB_PASSWORD
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
@@ -0,0 +1,25 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Manticore - {{ manticore_service }}
|
||||
|
||||
[Container]
|
||||
Image=docker.io/manticoresearch/manticore:{{ version['containers']['manticore'] }}
|
||||
ContainerName=manticore_{{ manticore_service }}
|
||||
HostName=manticore_{{ manticore_service }}
|
||||
|
||||
PublishPort={{ services[manticore_service]['ports']['manticore'] }}:9308
|
||||
|
||||
Volume=%h/data/containers/manticore/{{ manticore_service }}:/var/lib/manticore:rw
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
38
config/services/containers/app/opencloud/etc/csp.yaml.j2
Normal file
38
config/services/containers/app/opencloud/etc/csp.yaml.j2
Normal file
@@ -0,0 +1,38 @@
|
||||
directives:
|
||||
child-src:
|
||||
- '''self'''
|
||||
connect-src:
|
||||
- '''self'''
|
||||
- 'blob:'
|
||||
- 'https://raw.githubusercontent.com/opencloud-eu/awesome-apps'
|
||||
- 'https://update.opencloud.eu'
|
||||
- 'https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}'
|
||||
# default-src:
|
||||
# - '''none'''
|
||||
font-src:
|
||||
- '''self'''
|
||||
frame-ancestors:
|
||||
- '''self'''
|
||||
frame-src:
|
||||
- '''self'''
|
||||
- 'blob:'
|
||||
img-src:
|
||||
- '''self'''
|
||||
- 'data:'
|
||||
- 'blob:'
|
||||
manifest-src:
|
||||
- '''self'''
|
||||
media-src:
|
||||
- '''self'''
|
||||
# object-src:
|
||||
# - '''none'''
|
||||
script-src:
|
||||
- '''self'''
|
||||
- '''unsafe-inline'''
|
||||
- '''unsafe-eval'''
|
||||
style-src:
|
||||
- '''self'''
|
||||
- '''unsafe-inline'''
|
||||
worker-src:
|
||||
- '''self'''
|
||||
- 'blob:'
|
||||
17
config/services/containers/app/opencloud/etc/proxy.yaml.j2
Normal file
17
config/services/containers/app/opencloud/etc/proxy.yaml.j2
Normal file
@@ -0,0 +1,17 @@
|
||||
role_assignment:
|
||||
driver: "oidc"
|
||||
oidc_role_mapper:
|
||||
role_claim: "preferred_username"
|
||||
role_mapping:
|
||||
{% for admin_user in ['il'] %}
|
||||
- role_name: "admin"
|
||||
claim_value: "{{ admin_user }}"
|
||||
{% endfor %}
|
||||
{% for general_user in ['morsalin', 'eunkyoung'] %}
|
||||
- role_name: "user"
|
||||
claim_value: "{{ general_user }}"
|
||||
{% endfor %}
|
||||
# - role_name: "spaceadmin"
|
||||
# claim_value: ""
|
||||
# - role_name: user-light
|
||||
# claim_value: ""
|
||||
@@ -0,0 +1,60 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=OpenCloud
|
||||
|
||||
[Container]
|
||||
Image=docker.io/opencloudeu/opencloud:{{ version['containers']['opencloud'] }}
|
||||
ContainerName=opencloud
|
||||
HostName=opencloud
|
||||
|
||||
PublishPort={{ services['opencloud']['ports']['http'] }}:9200
|
||||
|
||||
Volume=%h/containers/opencloud:/etc/opencloud:rw
|
||||
Volume=%h/data/containers/opencloud:/var/lib/opencloud:rw
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
# Log level info
|
||||
Environment="OC_LOG_LEVEL=info"
|
||||
# TLS configuration
|
||||
Environment="PROXY_TLS=false"
|
||||
Environment="OC_INSECURE=true"
|
||||
# Connection
|
||||
Environment="PROXY_HTTP_ADDR=0.0.0.0:9200"
|
||||
Environment="OC_URL=https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
## CSP file location: allow authelia public domain
|
||||
Environment="PROXY_CSP_CONFIG_FILE_LOCATION=/etc/opencloud/csp.yaml"
|
||||
# OIDC
|
||||
Environment="OC_OIDC_ISSUER=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}"
|
||||
Environment="PROXY_OIDC_REWRITE_WELLKNOWN=true"
|
||||
## OIDC CLIENT CONFIGURATION and SCOPES
|
||||
Environment="WEB_OIDC_CLIENT_ID=opencloud"
|
||||
Environment="WEB_OIDC_SCOPE=openid profile email"
|
||||
## auto sign-in from authelia
|
||||
Environment="PROXY_AUTOPROVISION_ACCOUNTS=true"
|
||||
## Stop using internal idP service
|
||||
Environment="OC_EXCLUDE_RUN_SERVICES=idp"
|
||||
## Don't limit special characters
|
||||
Environment="GRAPH_USERNAME_MATCH=none"
|
||||
|
||||
|
||||
# OIDC standard link environments
|
||||
#Environment="WEB_OIDC_AUTHORITY=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}"
|
||||
#Environment="WEBFINGER_OIDC_ISSUER=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}"
|
||||
#Environment="OC_OIDC_CLIENT_ID=opencloud"
|
||||
#Environment="OC_OIDC_CLIENT_SCOPES=openid profile email groups"
|
||||
#Environment="WEBFINGER_ANDROID_OIDC_CLIENT_ID=opencloud"
|
||||
#Environment="WEBFINGER_ANDROID_OIDC_CLIENT_SCOPES=openid profile email groups offline_access"
|
||||
#Environment="WEBFINGER_DESKTOP_OIDC_CLIENT_ID=opencloud"
|
||||
#Environment="WEBFINGER_DESKTOP_OIDC_CLIENT_SCOPES=openid profile email groups offline_access"
|
||||
#Environment="WEBFINGER_IOS_OIDC_CLIENT_ID=opencloud"
|
||||
#Environment="WEBFINGER_IOS_OIDC_CLIENT_SCOPES=openid profile email groups offline_access"
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
@@ -0,0 +1,59 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Paperless
|
||||
|
||||
After=redis_paperless.service
|
||||
Wants=redis_paperless.service
|
||||
|
||||
[Container]
|
||||
Image=ghcr.io/paperless-ngx/paperless-ngx:{{ version['containers']['paperless'] }}
|
||||
ContainerName=paperless
|
||||
HostName=paperless
|
||||
PublishPort={{ services['paperless']['ports']['http'] }}:8000/tcp
|
||||
|
||||
# Volumes
|
||||
Volume=%h/data/containers/paperless/data:/usr/src/paperless/data:rw
|
||||
Volume=%h/data/containers/paperless/media:/usr/src/paperless/media:rw
|
||||
Volume=%h/data/containers/paperless/consume:/usr/src/paperless/consume:rw
|
||||
Volume=%h/containers/paperless/ssl:/etc/ssl/paperless:ro
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
Environment="PAPERLESS_TIME_ZONE=Asia/Seoul"
|
||||
Environment="PAPERLESS_URL=https://{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="PAPERLESS_OCR_LANGUAGE=kor+eng"
|
||||
Environment="PAPERLESS_OCR_LANGUAGES=kor"
|
||||
# Environment="PAPERLESS_OCR_MODE=force"
|
||||
# Environment="PAPERLESS_TASK_WORKERS=1"
|
||||
# Environment="PAPERLESS_THREADS_PER_WORKER=1"
|
||||
Environment="PAPERLESS_WORKER_TIMEOUT=7200"
|
||||
Secret=PAPERLESS_SECRET_KEY,type=env
|
||||
|
||||
# Redis
|
||||
Environment="PAPERLESS_REDIS=redis://host.containers.internal:{{ services['paperless']['ports']['redis'] }}"
|
||||
|
||||
# Database
|
||||
Environment="PAPERLESS_DBHOST={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}"
|
||||
Environment="PAPERLESS_DBPORT={{ services['postgresql']['ports']['tcp'] }}"
|
||||
Environment="PAPERLESS_DBNAME=paperless_db"
|
||||
Environment="PAPERLESS_DBUSER=paperless"
|
||||
Environment="PAPERLESS_DBSSLMODE=verify-full"
|
||||
Environment="PAPERLESS_DBSSLROOTCERT=/etc/ssl/paperless/{{ root_cert_filename }}"
|
||||
Secret=PAPERLESS_DBPASS,type=env
|
||||
|
||||
# OIDC
|
||||
Environment="PAPERLESS_APPS=allauth.socialaccount.providers.openid_connect"
|
||||
Environment="PAPERLESS_SOCIAL_AUTO_SIGNUP=true"
|
||||
Environment="PAPERLESS_SOCIALACCOUNT_ALLOW_SIGNUPS=true"
|
||||
Secret=PAPERLESS_SOCIALACCOUNT_PROVIDERS,type=env
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
4
config/services/containers/app/redis/redis.conf.j2
Normal file
4
config/services/containers/app/redis/redis.conf.j2
Normal file
@@ -0,0 +1,4 @@
|
||||
databases 16
|
||||
bind 0.0.0.0
|
||||
port 6379
|
||||
protected-mode no
|
||||
31
config/services/containers/app/redis/redis.container.j2
Normal file
31
config/services/containers/app/redis/redis.container.j2
Normal file
@@ -0,0 +1,31 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Redis - {{ redis_service }}
|
||||
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Container]
|
||||
Image=docker.io/library/redis:{{ version['containers']['redis'] }}
|
||||
|
||||
ContainerName=redis_{{ redis_service }}
|
||||
HostName=redis_{{ redis_service }}
|
||||
|
||||
PublishPort={{ services[redis_service]['ports']['redis'] }}:6379
|
||||
|
||||
Volume=%h/containers/redis/{{ redis_service }}/data:/data:rw
|
||||
Volume=%h/containers/redis/{{ redis_service }}/redis.conf:/usr/local/etc/redis/redis.conf:ro
|
||||
|
||||
Exec=redis-server /usr/local/etc/redis/redis.conf
|
||||
|
||||
Environment="TZ=Asia/Seoul"
|
||||
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
@@ -0,0 +1,34 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Vaultwarden
|
||||
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Container]
|
||||
Image=docker.io/vaultwarden/server:{{ version['containers']['vaultwarden'] }}
|
||||
|
||||
ContainerName=vaultwarden
|
||||
HostName=vaultwarden
|
||||
|
||||
PublishPort={{ services['vaultwarden']['ports']['http'] }}:80/tcp
|
||||
|
||||
Volume=%h/data/containers/vaultwarden:/data:rw
|
||||
Volume=%h/containers/vaultwarden/ssl:/etc/ssl/vaultwarden:ro
|
||||
|
||||
Environment="TZ=Asia/Seoul"
|
||||
Environment="DOMAIN=https://{{ services['vaultwarden']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="SIGNUPS_ALLOWED=false"
|
||||
Secret=VW_ADMIN_TOKEN,type=env,target=ADMIN_TOKEN
|
||||
Secret=VW_DATABASE_URL,type=env,target=DATABASE_URL
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
57
config/services/containers/app/vikunja/vikunja.container.j2
Normal file
57
config/services/containers/app/vikunja/vikunja.container.j2
Normal file
@@ -0,0 +1,57 @@
|
||||
[Quadlet]
|
||||
DefaultDependencies=false
|
||||
|
||||
[Unit]
|
||||
Description=Vikunja
|
||||
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Container]
|
||||
Image=docker.io/vikunja/vikunja:{{ version['containers']['vikunja'] }}
|
||||
ContainerName=vikunja
|
||||
HostName=vikunja
|
||||
PublishPort={{ services['vikunja']['ports']['http'] }}:3456/tcp
|
||||
|
||||
# Volumes
|
||||
Volume=%h/data/containers/vikunja:/app/vikunja/files:rw
|
||||
Volume=%h/containers/vikunja/ssl:/etc/ssl/vikunja:ro
|
||||
|
||||
# General
|
||||
Environment="TZ=Asia/Seoul"
|
||||
Environment="VIKUNJA_DEFAULTSETTINGS_TIMEZONE=Asia/Seoul"
|
||||
Environment="VIKUNJA_SERVICE_TIMEZONE=Asia/Seoul"
|
||||
Environment="VIKUNJA_SERVICE_PUBLICURL=https://{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="VIKUNJA_SERVICE_ENABLEREGISTRATION=false"
|
||||
Secret=VIKUNJA_SERVICE_JWTSECRET,type=env
|
||||
|
||||
|
||||
# Database
|
||||
Environment="VIKUNJA_DATABASE_TYPE=postgres"
|
||||
Environment="VIKUNJA_DATABASE_HOST={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}"
|
||||
Environment="VIKUNJA_DATABASE_USER=vikunja"
|
||||
Environment="VIKUNJA_DATABASE_DATABASE=vikunja_db"
|
||||
Environment="VIKUNJA_DATABASE_SSLMODE=verify-full"
|
||||
Environment="VIKUNJA_DATABASE_SSLROOTCERT=/etc/ssl/vikunja/{{ root_cert_filename }}"
|
||||
Secret=VIKUNJA_DATABASE_PASSWORD,type=env
|
||||
|
||||
|
||||
# OIDC
|
||||
Environment="VIKUNJA_AUTH_OPENID_ENABLED=true"
|
||||
Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_NAME=Authelia"
|
||||
Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_AUTHURL=https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}"
|
||||
Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_CLIENTID=vikunja"
|
||||
# Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_SCOPE=" default value = openid email profile
|
||||
# Vikunja doesn't support OIDC and local dual login.
|
||||
# Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_USERNAMEFALLBACK=true"
|
||||
# Environment="VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_EMAILFALLBACK=true"
|
||||
Secret=VIKUNJA_AUTH_OPENID_PROVIDERS_authelia_CLIENTSECRET,type=env
|
||||
|
||||
[Service]
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
TimeoutStopSec=120
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
@@ -15,7 +15,7 @@ ContainerName=authelia
|
||||
HostName=authelia
|
||||
|
||||
# Web UI
|
||||
PublishPort=9091:9091/tcp
|
||||
PublishPort={{ services['authelia']['ports']['http'] }}:9091/tcp
|
||||
|
||||
|
||||
Volume=%h/containers/authelia/config:/config:rw
|
||||
@@ -56,8 +56,9 @@ Exec=--config /config/authelia.yaml
|
||||
# Wait for dependency
|
||||
# They run as rootless podman container, so their port is not opened until they are normaly running
|
||||
# Check their ports with nc command
|
||||
ExecStartPre=/usr/bin/nc -zv {{ infra_uri['postgresql']['domain'] }} {{ infra_uri['postgresql']['ports']['tcp'] }}
|
||||
ExecStartPre=/usr/bin/nc -zv {{ infra_uri['ldap']['domain'] }} {{ infra_uri['ldap']['ports']['ldaps'] }}
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['postgresql']['domain'] }}.{{ domain['internal'] }} {{ services['postgresql']['ports']['tcp'] }}
|
||||
# services['ldap']['ports']['ldaps'] is 6360, but nftables works on 636 the original port
|
||||
ExecStartPre=/usr/bin/nc -zv {{ services['ldap']['domain'] }}.{{ domain['internal'] }} 636
|
||||
ExecStartPre=sleep 5
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
|
||||
@@ -10,7 +10,7 @@ theme: 'auto'
|
||||
# Server configuration
|
||||
server:
|
||||
# TLS will be applied on caddy
|
||||
address: 'tcp://:9091/'
|
||||
address: 'tcp://:{{ services['authelia']['ports']['http'] }}/'
|
||||
|
||||
# Log configuration
|
||||
log:
|
||||
@@ -20,7 +20,7 @@ log:
|
||||
# TOTP configuration
|
||||
totp:
|
||||
# issure option is for 2FA app. It works as identifier. "My homelab' or 'ilnmors.internal', 'Authelia - ilnmors'
|
||||
issuer: 'ilnmors.internal'
|
||||
issuer: '{{ domain['internal'] }}'
|
||||
|
||||
# Identity validation confituration
|
||||
identity_validation:
|
||||
@@ -31,21 +31,21 @@ identity_validation:
|
||||
authentication_backend:
|
||||
ldap:
|
||||
# ldaps uses 636 -> NAT automatically change port 636 in output packet -> 2636 which lldap server uses.
|
||||
address: 'ldaps://ldap.ilnmors.internal'
|
||||
address: 'ldaps://{{ services['ldap']['domain'] }}.{{ domain['internal'] }}'
|
||||
implementation: 'lldap'
|
||||
# tls configruation, it uses certificates_directory's /etc/ssl/authelia/ilnmors_root_ca.crt
|
||||
# tls configruation, it uses certificates_directory's /etc/ssl/authelia/{{ root_cert_filename }}
|
||||
tls:
|
||||
server_name: 'ldap.ilnmors.internal'
|
||||
server_name: '{{ services['ldap']['domain'] }}.{{ domain['internal'] }}'
|
||||
skip_verify: false
|
||||
# LLDAP base DN
|
||||
base_dn: 'dc=ilnmors,dc=internal'
|
||||
base_dn: '{{ domain['dc'] }}'
|
||||
additional_users_dn: 'ou=people'
|
||||
additional_groups_dn: 'ou=groups'
|
||||
# LLDAP filters
|
||||
users_filter: '(&(|({username_attribute}={input})({mail_attribute}={input}))(objectClass=person))'
|
||||
groups_filter: '(&(member={dn})(objectClass=groupOfNames))'
|
||||
# LLDAP bind account configuration
|
||||
user: 'uid=authelia,ou=people,dc=ilnmors,dc=internal'
|
||||
user: 'uid=authelia,ou=people,{{ domain['dc'] }}'
|
||||
password: '' # $AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE option is designated in container file
|
||||
|
||||
# Access control configuration
|
||||
@@ -53,14 +53,12 @@ access_control:
|
||||
default_policy: 'deny'
|
||||
rules:
|
||||
# authelia portal
|
||||
- domain: 'authelia.ilnmors.internal'
|
||||
- domain: '{{ services['authelia']['domain'] }}.{{ domain['public'] }}'
|
||||
policy: 'bypass'
|
||||
- domain: 'authelia.ilnmors.com'
|
||||
policy: 'bypass'
|
||||
- domain: 'test.ilnmors.com'
|
||||
policy: 'one_factor'
|
||||
subject:
|
||||
- 'group:admins'
|
||||
# - domain: 'test.ilnmors.com'
|
||||
# policy: 'one_factor'
|
||||
# subject:
|
||||
# - 'group:admins'
|
||||
# Session provider configuration
|
||||
session:
|
||||
secret: '' # $AUTHELIA_SESSION_SECRET_FILE is designated in container file
|
||||
@@ -68,8 +66,8 @@ session:
|
||||
inactivity: '24 hours' # Session maintains for 24 hours without actions
|
||||
cookies:
|
||||
- name: 'authelia_public_session'
|
||||
domain: 'ilnmors.com'
|
||||
authelia_url: 'https://authelia.ilnmors.com'
|
||||
domain: '{{ domain['public'] }}'
|
||||
authelia_url: 'https://{{ services['authelia']['domain'] }}.{{ domain['public'] }}'
|
||||
same_site: 'lax'
|
||||
|
||||
# This authelia doesn't use Redis.
|
||||
@@ -78,12 +76,12 @@ session:
|
||||
storage:
|
||||
encryption_key: '' # $AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE is designated in container file
|
||||
postgres:
|
||||
address: 'tcp://{{ infra_uri['postgresql']['domain'] }}:{{ infra_uri['postgresql']['ports']['tcp'] }}'
|
||||
address: 'tcp://{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:{{ services['postgresql']['ports']['tcp'] }}'
|
||||
database: 'authelia_db'
|
||||
username: 'authelia'
|
||||
password: '' # $AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE is designated in container file
|
||||
tls:
|
||||
server_name: '{{ infra_uri['postgresql']['domain'] }}'
|
||||
server_name: '{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}'
|
||||
skip_verify: false
|
||||
|
||||
# Notification provider
|
||||
@@ -95,13 +93,24 @@ notifier:
|
||||
identity_providers:
|
||||
oidc:
|
||||
hmac_secret: '' # $AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE
|
||||
# For the app which doesn't use secret.
|
||||
cors:
|
||||
endpoints:
|
||||
- 'authorization'
|
||||
- 'token'
|
||||
- 'revocation'
|
||||
- 'introspection'
|
||||
- 'userinfo'
|
||||
allowed_origins:
|
||||
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}'
|
||||
allowed_origins_from_client_redirect_uris: true
|
||||
jwks:{% raw %}
|
||||
- algorithm: 'RS256'
|
||||
use: 'sig'
|
||||
key: {{ secret "/run/secrets/AUTHELIA_JWKS_RS256" | mindent 10 "|" | msquote }}
|
||||
- algorithm: 'ES256'
|
||||
use: 'sig'
|
||||
key: {{ secret "/run/secrets/AUTHELIA_JWKS_ES256" | mindent 10 "|" | msquote }}{% endraw %}
|
||||
key: {{ secret "/run/secrets/AUTHELIA_JWKS_ES256" | mindent 10 "|" | msquote }}{% endraw %}
|
||||
clients:
|
||||
# https://www.authelia.com/integration/openid-connect/clients/synology-dsm/
|
||||
- client_id: 'dsm'
|
||||
@@ -117,7 +126,7 @@ identity_providers:
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ infra_uri['nas']['domain'] }}:{{ infra_uri['nas']['ports']['https'] }}'
|
||||
- 'https://{{ services['nas']['domain'] }}.{{ domain['internal'] }}:{{ services['nas']['ports']['https'] }}'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
@@ -131,3 +140,228 @@ identity_providers:
|
||||
userinfo_signed_response_alg: 'none'
|
||||
# [ client_secret_post | client_secret_basic ]
|
||||
token_endpoint_auth_method: 'client_secret_post'
|
||||
# https://www.authelia.com/integration/openid-connect/clients/gitea/
|
||||
- client_id: 'gitea'
|
||||
client_name: 'gitea'
|
||||
client_secret: '{{ hostvars['console']['gitea']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ services['gitea']['domain']['public'] }}.{{ domain['public'] }}/user/oauth2/authelia/callback'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'email'
|
||||
- 'profile'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_basic'
|
||||
# https://www.authelia.com/integration/openid-connect/clients/immich/
|
||||
- client_id: 'immich'
|
||||
client_name: 'immich'
|
||||
client_secret: '{{ hostvars['console']['immich']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ services['immich']['domain']['public'] }}.{{ domain['public'] }}/auth/login'
|
||||
- 'https://{{ services['immich']['domain']['public'] }}.{{ domain['public'] }}/user-settings'
|
||||
- 'app.immich:///oauth-callback'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_post'
|
||||
# https://www.authelia.com/integration/openid-connect/clients/actual-budget/
|
||||
- client_id: 'actual-budget'
|
||||
client_name: 'Actual Budget'
|
||||
client_secret: '{{ hostvars['console']['actualbudget']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}/openid/callback'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'groups'
|
||||
- 'email'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_basic'
|
||||
# https://www.authelia.com/integration/openid-connect/clients/paperless/
|
||||
- client_id: 'paperless'
|
||||
client_name: 'Paperless'
|
||||
client_secret: '{{ hostvars['console']['paperless']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: true
|
||||
pkce_challenge_method: 'S256'
|
||||
redirect_uris:
|
||||
- 'https://{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }}/accounts/oidc/authelia/login/callback/'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
- 'groups'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_post'
|
||||
# https://www.authelia.com/integration/openid-connect/clients/vikunja/
|
||||
- client_id: 'vikunja'
|
||||
client_name: 'Vikunja'
|
||||
client_secret: '{{ hostvars['console']['vikunja']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }}/auth/openid/authelia'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_basic'
|
||||
# OpenCloud configuration
|
||||
## https://docs.opencloud.eu/docs/admin/configuration/authentication-and-user-management/external-idp/
|
||||
## Web
|
||||
- client_id: 'opencloud'
|
||||
client_name: 'OpenCloud'
|
||||
public: true
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: true
|
||||
pkce_challenge_method: 'S256'
|
||||
redirect_uris:
|
||||
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/'
|
||||
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/oidc-callback.html'
|
||||
- 'https://{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}/oidc-silent-redirect.html'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
- 'groups'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'RS256'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'none'
|
||||
## desktop
|
||||
- client_id: 'OpenCloudDesktop'
|
||||
client_name: 'OpenCloud'
|
||||
public: true
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: true
|
||||
pkce_challenge_method: 'S256'
|
||||
redirect_uris:
|
||||
- 'http://localhost'
|
||||
- 'http://127.0.0.1'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
- 'groups'
|
||||
- 'offline_access'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
- 'refresh_token'
|
||||
access_token_signed_response_alg: 'RS256'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'none'
|
||||
## Android
|
||||
- client_id: 'OpenCloudAndroid'
|
||||
client_name: 'OpenCloud'
|
||||
public: true
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: true
|
||||
pkce_challenge_method: 'S256'
|
||||
redirect_uris:
|
||||
- 'oc://android.opencloud.eu'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
- 'groups'
|
||||
- 'offline_access'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
- 'refresh_token'
|
||||
access_token_signed_response_alg: 'RS256'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'none'
|
||||
## IOS
|
||||
- client_id: 'OpenCloudIOS'
|
||||
client_name: 'OpenCloud'
|
||||
public: true
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: true
|
||||
pkce_challenge_method: 'S256'
|
||||
redirect_uris:
|
||||
- 'oc://ios.opencloud.eu'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
- 'groups'
|
||||
- 'offline_access'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
- 'refresh_token'
|
||||
access_token_signed_response_alg: 'RS256'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'none'
|
||||
# https://docs.affine.pro/self-host-affine/administer/oauth-2-0
|
||||
- client_id: 'affine'
|
||||
client_name: 'Affine'
|
||||
client_secret: '{{ hostvars['console']['affine']['oidc']['hash'] }}'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
require_pkce: false
|
||||
pkce_challenge_method: ''
|
||||
redirect_uris:
|
||||
- 'https://{{ services['affine']['domain']['public'] }}.{{ domain['public'] }}/oauth/callback'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'email'
|
||||
response_types:
|
||||
- 'code'
|
||||
grant_types:
|
||||
- 'authorization_code'
|
||||
access_token_signed_response_alg: 'none'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
token_endpoint_auth_method: 'client_secret_post'
|
||||
|
||||
@@ -12,6 +12,6 @@ RUN xcaddy build \
|
||||
FROM docker.io/library/caddy:{{ version['containers']['caddy'] }}
|
||||
|
||||
COPY --from=builder /usr/bin/caddy /usr/bin/caddy
|
||||
COPY ./ilnmors_root_ca.crt /usr/local/share/ca-certificates/ilnmors_root_ca.crt
|
||||
COPY ./{{ root_cert_filename }} /usr/local/share/ca-certificates/{{ root_cert_filename }}
|
||||
|
||||
RUN update-ca-certificates
|
||||
|
||||
@@ -14,18 +14,18 @@ Wants=network-online.target
|
||||
|
||||
|
||||
[Container]
|
||||
Image=ilnmors.internal/{{ node['name'] }}/caddy:{{ version['containers']['caddy'] }}
|
||||
Image={{ domain['internal'] }}/{{ node['name'] }}/caddy:{{ version['containers']['caddy'] }}
|
||||
|
||||
ContainerName=caddy_{{ node['name'] }}
|
||||
HostName=caddy_{{ node['name'] }}
|
||||
{% if node['name'] == 'infra' %}
|
||||
AddHost={{ infra_uri['ca']['domain'] }}:host-gateway
|
||||
AddHost={{ infra_uri['prometheus']['domain'] }}:host-gateway
|
||||
AddHost={{ infra_uri['loki']['domain'] }}:host-gateway
|
||||
AddHost={{ services['ca']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
AddHost={{ services['prometheus']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
AddHost={{ services['loki']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
{% endif %}
|
||||
|
||||
PublishPort=2080:80/tcp
|
||||
PublishPort=2443:443/tcp
|
||||
PublishPort={{ services['caddy']['ports']['http'] }}:80/tcp
|
||||
PublishPort={{ services['caddy']['ports']['https'] }}:443/tcp
|
||||
|
||||
Volume=%h/containers/caddy/etc:/etc/caddy:ro
|
||||
Volume=%h/containers/caddy/data:/data:rw
|
||||
|
||||
79
config/services/containers/common/caddy/etc/app/Caddyfile.j2
Normal file
79
config/services/containers/common/caddy/etc/app/Caddyfile.j2
Normal file
@@ -0,0 +1,79 @@
|
||||
{
|
||||
servers {
|
||||
# Only accept packets from auth main caddy
|
||||
trusted_proxies static {{ hostvars['fw']['network4']['auth']['server'] }} {{ hostvars['fw']['network6']['auth']['server'] }}
|
||||
}
|
||||
}
|
||||
# Private TLS ACME with DNS-01-challenge
|
||||
(private_tls) {
|
||||
tls {
|
||||
issuer acme {
|
||||
dir https://{{ services['ca']['domain'] }}.{{ domain['internal'] }}:{{ services['ca']['ports']['https'] }}/acme/acme@{{ domain['internal'] }}/directory
|
||||
dns rfc2136 {
|
||||
server {{ services['bind']['domain'] }}.{{ domain['internal'] }}:{{ services['bind']['ports']['dns'] }}
|
||||
key_name acme-key
|
||||
key_alg hmac-sha256
|
||||
key "{file./run/secrets/CADDY_ACME_KEY}"
|
||||
}
|
||||
resolvers {{ services['bind']['domain'] }}.{{ domain['internal'] }}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{{ node['name'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
metrics
|
||||
}
|
||||
# test.app.ilnmors.internal {
|
||||
# import private_tls
|
||||
# root * /usr/share/caddy
|
||||
# file_server
|
||||
# }
|
||||
{{ services['vaultwarden']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['vaultwarden']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['gitea']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['gitea']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['immich']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['immich']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['actualbudget']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['actualbudget']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['paperless']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['paperless']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['vikunja']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['vikunja']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['opencloud']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['opencloud']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
{{ services['affine']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
reverse_proxy host.containers.internal:{{ services['affine']['ports']['http'] }} {
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
# CrowdSec LAPI connection
|
||||
crowdsec {
|
||||
api_url https://{{ infra_uri['crowdsec']['domain'] }}:{{ infra_uri['crowdsec']['ports']['https'] }}
|
||||
api_url https://{{ services['crowdsec']['domain'] }}.{{ domain['internal'] }}:{{ services['crowdsec']['ports']['https'] }}
|
||||
api_key "{file./run/secrets/CADDY_CROWDSEC_KEY}"
|
||||
}
|
||||
}
|
||||
@@ -15,48 +15,130 @@
|
||||
roll_size 100MiB
|
||||
roll_keep 1
|
||||
}
|
||||
format json
|
||||
format json
|
||||
}
|
||||
}
|
||||
# Private TLS ACME with DNS-01-challenge
|
||||
(private_tls) {
|
||||
tls {
|
||||
issuer acme {
|
||||
dir https://{{ infra_uri['ca']['domain'] }}:{{ infra_uri['ca']['ports']['https'] }}/acme/acme@ilnmors.internal/directory
|
||||
dir https://{{ services['ca']['domain'] }}.{{ domain['internal'] }}:{{ services['ca']['ports']['https'] }}/acme/acme@{{ domain['internal'] }}/directory
|
||||
dns rfc2136 {
|
||||
server {{ infra_uri['bind']['domain'] }}:{{ infra_uri['bind']['ports']['dns'] }}
|
||||
server {{ services['bind']['domain'] }}.{{ domain['internal'] }}:{{ services['bind']['ports']['dns'] }}
|
||||
key_name acme-key
|
||||
key_alg hmac-sha256
|
||||
key "{file./run/secrets/CADDY_ACME_KEY}"
|
||||
}
|
||||
resolvers {{ services['bind']['domain'] }}.{{ domain['internal'] }}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Public domain
|
||||
authelia.ilnmors.com {
|
||||
{{ services['authelia']['domain'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy host.containers.internal:9091
|
||||
reverse_proxy host.containers.internal:{{ services['authelia']['ports']['http'] }}
|
||||
}
|
||||
}
|
||||
test.ilnmors.com {
|
||||
# test.ilnmors.com {
|
||||
# import crowdsec_log
|
||||
# route {
|
||||
# crowdsec
|
||||
# forward_auth host.containers.internal:9091 {
|
||||
# # Authelia Forward Auth endpoint URI
|
||||
# uri /api/authz/forward-auth
|
||||
# copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
|
||||
# }
|
||||
# root * /usr/share/caddy
|
||||
# file_server
|
||||
# }
|
||||
# }
|
||||
# test.app.ilnmors.com {
|
||||
# import crowdsec_log
|
||||
# route {
|
||||
# crowdsec
|
||||
# reverse_proxy https://test.app.ilnmors.internal {
|
||||
# header_up Host {http.reverse_proxy.upstream.host}
|
||||
# }
|
||||
# }
|
||||
# }
|
||||
{{ services['vaultwarden']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
forward_auth host.containers.internal:9091 {
|
||||
# Authelia Forward Auth endpoint URI
|
||||
uri /api/authz/forward-auth
|
||||
copy_headers Remote-User Remote-Groups Remote-Email Remote-Name
|
||||
reverse_proxy https://{{ services['vaultwarden']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['gitea']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['gitea']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['immich']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['immich']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['actualbudget']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['paperless']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['paperless']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['vikunja']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['vikunja']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['opencloud']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
{{ services['affine']['domain']['public'] }}.{{ domain['public'] }} {
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://{{ services['affine']['domain']['internal'] }}.{{ domain['internal'] }} {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
root * /usr/share/caddy
|
||||
file_server
|
||||
}
|
||||
}
|
||||
|
||||
# Internal domain
|
||||
auth.ilnmors.internal {
|
||||
{{ node['name'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
metrics
|
||||
}
|
||||
|
||||
@@ -2,39 +2,40 @@
|
||||
(private_tls) {
|
||||
tls {
|
||||
issuer acme {
|
||||
dir https://{{ infra_uri['ca']['domain'] }}:{{ infra_uri['ca']['ports']['https'] }}/acme/acme@ilnmors.internal/directory
|
||||
dir https://{{ services['ca']['domain'] }}.{{ domain['internal'] }}:{{ services['ca']['ports']['https'] }}/acme/acme@{{ domain['internal'] }}/directory
|
||||
dns rfc2136 {
|
||||
server {{ infra_uri['bind']['domain'] }}:{{ infra_uri['bind']['ports']['dns'] }}
|
||||
server {{ services['bind']['domain'] }}.{{ domain['internal'] }}:{{ services['bind']['ports']['dns'] }}
|
||||
key_name acme-key
|
||||
key_alg hmac-sha256
|
||||
key "{file./run/secrets/CADDY_ACME_KEY}"
|
||||
}
|
||||
resolvers {{ services['bind']['domain'] }}.{{ domain['internal'] }}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
infra.ilnmors.internal {
|
||||
{{ node['name'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
metrics
|
||||
}
|
||||
|
||||
{{ infra_uri['ldap']['domain'] }} {
|
||||
{{ services['ldap']['domain'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
route {
|
||||
reverse_proxy host.containers.internal:{{ infra_uri['ldap']['ports']['http'] }}
|
||||
reverse_proxy host.containers.internal:{{ services['ldap']['ports']['http'] }}
|
||||
}
|
||||
}
|
||||
|
||||
{{ infra_uri['prometheus']['domain'] }} {
|
||||
{{ services['prometheus']['domain'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
route {
|
||||
reverse_proxy https://{{ infra_uri['prometheus']['domain'] }}:{{ infra_uri['prometheus']['ports']['https'] }}
|
||||
reverse_proxy https://{{ services['prometheus']['domain'] }}.{{ domain['internal'] }}:{{ services['prometheus']['ports']['https'] }}
|
||||
}
|
||||
}
|
||||
|
||||
grafana.ilnmors.internal {
|
||||
{{ services['grafana']['domain'] }}.{{ domain['internal'] }} {
|
||||
import private_tls
|
||||
route {
|
||||
reverse_proxy host.containers.internal:3000
|
||||
reverse_proxy host.containers.internal:{{ services['grafana']['ports']['http'] }}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ Image=docker.io/smallstep/step-ca:{{ version['containers']['step'] }}
|
||||
ContainerName=ca
|
||||
HostName=ca
|
||||
|
||||
PublishPort=9000:9000/tcp
|
||||
PublishPort={{ services['ca']['ports']['https'] }}:9000/tcp
|
||||
|
||||
Volume=%h/containers/ca/certs:/home/step/certs:ro
|
||||
Volume=%h/containers/ca/secrets:/home/step/secrets:ro
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"root": "/home/step/certs/ilnmors_root_ca.crt",
|
||||
"root": "/home/step/certs/{{ root_cert_filename }}",
|
||||
"federatedRoots": null,
|
||||
"crt": "/home/step/certs/ilnmors_intermediate_ca.crt",
|
||||
"key": "/home/step/secrets/ilnmors_intermediate_ca.key",
|
||||
"crt": "/home/step/certs/{{ intermediate_cert_filename }}",
|
||||
"key": "/home/step/secrets/{{ intermediate_key_filename }}",
|
||||
"address": ":9000",
|
||||
"insecureAddress": "",
|
||||
"dnsNames": [
|
||||
"{{ infra_uri['ca']['domain'] }}"
|
||||
"{{ services['ca']['domain'] }}.{{ domain['internal'] }}"
|
||||
],
|
||||
"logger": {
|
||||
"format": "text"
|
||||
@@ -21,8 +21,9 @@
|
||||
"x509": {
|
||||
"allow": {
|
||||
"dns": [
|
||||
"ilnmors.internal",
|
||||
"*.ilnmors.internal"
|
||||
"{{ domain['internal'] }}",
|
||||
"*.{{ domain['internal'] }}",
|
||||
"*.app.{{ domain['internal'] }}"
|
||||
]
|
||||
},
|
||||
"allowWildcardNames": true
|
||||
@@ -31,7 +32,7 @@
|
||||
"provisioners": [
|
||||
{
|
||||
"type": "ACME",
|
||||
"name": "acme@ilnmors.internal",
|
||||
"name": "acme@{{ domain['internal'] }}",
|
||||
"claims": {
|
||||
"defaultTLSCertDuration": "2160h0m0s",
|
||||
"enableSSHCA": true,
|
||||
@@ -57,5 +58,5 @@
|
||||
"maxVersion": 1.3,
|
||||
"renegotiation": false
|
||||
},
|
||||
"commonName": "ilnmors Online CA"
|
||||
"commonName": "{{ domain['internal'] }} Online CA"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"ca-url": "https://{{ infra_uri['ca']['domain'] }}:{{ infra_uri['ca']['ports']['https'] }}",
|
||||
"ca-url": "https://{{ services['ca']['domain'] }}.{{ domain['internal'] }}:{{ services['ca']['ports']['https'] }}",
|
||||
"ca-config": "/home/step/config/ca.json",
|
||||
"fingerprint": "215c851d2d0d2dbf90fc3507425207c29696ffd587c640c94a68dddb1d84d8e8",
|
||||
"root": "/home/step/certs/ilnmors_root_ca.crt"
|
||||
"root": "/home/step/certs/{{ root_cert_filename }}"
|
||||
}
|
||||
|
||||
@@ -7,19 +7,19 @@ provisioning = /etc/grafana/provisioning
|
||||
|
||||
[server]
|
||||
protocol = http
|
||||
http_port = 3000
|
||||
domain = grafana.ilnmors.internal
|
||||
root_url = http://grafana.ilnmors.internal/
|
||||
http_port = {{ services['grafana']['ports']['http'] }}
|
||||
domain = {{ services['grafana']['domain'] }}.{{ domain['internal'] }}
|
||||
root_url = http://{{ services['grafana']['domain'] }}.{{ domain['internal'] }}/
|
||||
router_logging = false
|
||||
|
||||
[database]
|
||||
type = postgres
|
||||
host = {{ infra_uri['postgresql']['domain'] }}:{{ infra_uri['postgresql']['ports']['tcp'] }}
|
||||
host = {{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:{{ services['postgresql']['ports']['tcp'] }}
|
||||
name = grafana_db
|
||||
user = grafana
|
||||
password = $__file{/run/secrets/GF_DB_PASSWORD}
|
||||
ssl_mode = verify-full
|
||||
ca_cert_path = /etc/ssl/grafana/ilnmors_root_ca.crt
|
||||
ca_cert_path = /etc/ssl/grafana/{{ root_cert_filename }}
|
||||
|
||||
[auth.ldap]
|
||||
enabled = true
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# https://github.com/lldap/lldap/blob/main/example_configs/grafana_ldap_config.toml
|
||||
[[servers]]
|
||||
host = "{{ infra_uri['ldap']['domain'] }}"
|
||||
port = {{ infra_uri['ldap']['ports']['ldaps'] }}
|
||||
host = "{{ services['ldap']['domain'] }}.{{ domain['internal'] }}"
|
||||
port = {{ services['ldap']['ports']['ldaps'] }}
|
||||
# Activate STARTTLS or LDAPS
|
||||
use_ssl = true
|
||||
# true = STARTTLS, false = LDAPS
|
||||
@@ -9,16 +9,16 @@ start_tls = false
|
||||
tls_ciphers = []
|
||||
min_tls_version = ""
|
||||
ssl_skip_verify = false
|
||||
root_ca_cert = "/etc/ssl/grafana/ilnmors_root_ca.crt"
|
||||
root_ca_cert = "/etc/ssl/grafana/{{ root_cert_filename }}"
|
||||
# mTLS option, it is not needed
|
||||
# client_cert = "/path/to/client.crt"
|
||||
# client_key = "/path/to/client.key"
|
||||
|
||||
bind_dn = "uid=grafana,ou=people,dc=ilnmors,dc=internal"
|
||||
bind_dn = "uid=grafana,ou=people,{{ domain['dc'] }}"
|
||||
bind_password = "$__file{/run/secrets/LDAP_BIND_PASSWORD}"
|
||||
|
||||
search_filter = "(|(uid=%s)(mail=%s))"
|
||||
search_base_dns = ["dc=ilnmors,dc=internal"]
|
||||
search_base_dns = ["{{ domain['dc'] }}"]
|
||||
|
||||
[servers.attributes]
|
||||
member_of = "memberOf"
|
||||
@@ -28,20 +28,20 @@ surname = "sn"
|
||||
username = "uid"
|
||||
|
||||
group_search_filter = "(&(objectClass=groupOfUniqueNames)(uniqueMember=%s))"
|
||||
group_search_base_dns = ["ou=groups,dc=ilnmors,dc=internal"]
|
||||
group_search_base_dns = ["ou=groups,{{ domain['dc'] }}"]
|
||||
group_search_filter_user_attribute = "uid"
|
||||
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=lldap_admin,ou=groups,dc=ilnmors,dc=internal"
|
||||
group_dn = "cn=lldap_admin,ou=groups,{{ domain['dc'] }}"
|
||||
org_role = "Admin"
|
||||
grafana_admin = true
|
||||
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=admins,ou=groups,dc=ilnmors,dc=internal"
|
||||
group_dn = "cn=admins,ou=groups,{{ domain['dc'] }}"
|
||||
org_role = "Editor"
|
||||
grafana_admin = false
|
||||
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=users,ou=groups,dc=ilnmors,dc=internal"
|
||||
group_dn = "cn=users,ou=groups,{{ domain['dc'] }}"
|
||||
org_role = "Viewer"
|
||||
grafana_admin = false
|
||||
|
||||
@@ -4,7 +4,7 @@ apiVersion: 1
|
||||
datasources:
|
||||
- name: Prometheus
|
||||
type: prometheus
|
||||
url: https://prometheus.ilnmors.internal:9090
|
||||
url: https://{{ services['prometheus']['domain'] }}.{{ domain['internal'] }}:{{ services['prometheus']['ports']['https'] }}
|
||||
access: proxy
|
||||
isDefault: true
|
||||
jsonData:
|
||||
@@ -12,11 +12,11 @@ datasources:
|
||||
tlsAuthWithCACert: true
|
||||
httpMethod: POST
|
||||
secureJsonData:
|
||||
tlsCACert: "$__file{/etc/ssl/grafana/ilnmors_root_ca.crt}"
|
||||
tlsCACert: "$__file{/etc/ssl/grafana/{{ root_cert_filename }}}"
|
||||
|
||||
- name: Loki
|
||||
type: loki
|
||||
url: https://loki.ilnmors.internal:3100
|
||||
url: https://{{ services['loki']['domain'] }}.{{ domain['internal'] }}:{{ services['loki']['ports']['https'] }}
|
||||
access: proxy
|
||||
jsonData:
|
||||
tlsAuth: false
|
||||
@@ -25,5 +25,5 @@ datasources:
|
||||
httpHeaderName1: "X-Scope-OrgID"
|
||||
maxLines: 1000
|
||||
secureJsonData:
|
||||
tlsCACert: "$__file{/etc/ssl/grafana/ilnmors_root_ca.crt}"
|
||||
httpHeaderValue1: "ilnmors.internal"
|
||||
tlsCACert: "$__file{/etc/ssl/grafana/{{ root_cert_filename }}}"
|
||||
httpHeaderValue1: "{{ domain['internal'] }} "
|
||||
@@ -13,12 +13,12 @@ Image=docker.io/grafana/grafana:{{ version['containers']['grafana'] }}
|
||||
ContainerName=grafana
|
||||
HostName=grafana
|
||||
|
||||
AddHost={{ infra_uri['postgresql']['domain'] }}:host-gateway
|
||||
AddHost={{ infra_uri['ldap']['domain'] }}:host-gateway
|
||||
AddHost={{ infra_uri['prometheus']['domain'] }}:host-gateway
|
||||
AddHost={{ infra_uri['loki']['domain'] }}:host-gateway
|
||||
AddHost={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
AddHost={{ services['ldap']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
AddHost={{ services['prometheus']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
AddHost={{ services['loki']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
|
||||
PublishPort=3000:3000/tcp
|
||||
PublishPort={{ services['grafana']['ports']['http'] }}:3000/tcp
|
||||
|
||||
Volume=%h/containers/grafana/data:/var/lib/grafana:rw
|
||||
Volume=%h/containers/grafana/etc:/etc/grafana:ro
|
||||
|
||||
@@ -13,11 +13,11 @@ Image=docker.io/lldap/lldap:{{ version['containers']['ldap'] }}
|
||||
ContainerName=ldap
|
||||
HostName=ldap
|
||||
# They are at the same host (for Pasta, it is needed)
|
||||
AddHost={{ infra_uri['postgresql']['domain'] }}:host-gateway
|
||||
AddHost={{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:host-gateway
|
||||
# For LDAPS - 636 > 6360 nftables
|
||||
PublishPort=6360:6360/tcp
|
||||
PublishPort={{ services['ldap']['ports']['ldaps'] }}:6360/tcp
|
||||
# Web UI
|
||||
PublishPort=17170:17170/tcp
|
||||
PublishPort={{ services['ldap']['ports']['http'] }}:17170/tcp
|
||||
|
||||
|
||||
Volume=%h/containers/ldap/data:/data:rw
|
||||
@@ -27,7 +27,7 @@ Volume=%h/containers/ldap/ssl:/etc/ssl/ldap:ro
|
||||
Environment="TZ=Asia/Seoul"
|
||||
|
||||
# Domain
|
||||
Environment="LLDAP_LDAP_BASE_DN=dc=ilnmors,dc=internal"
|
||||
Environment="LLDAP_LDAP_BASE_DN={{ domain['dc'] }}"
|
||||
|
||||
# LDAPS
|
||||
Environment="LLDAP_LDAPS_OPTIONS__ENABLED=true"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
server:
|
||||
http_listen_address: "::"
|
||||
http_listen_port: 3100
|
||||
http_listen_port: {{ services['loki']['ports']['https'] }}
|
||||
http_tls_config:
|
||||
cert_file: /etc/ssl/loki/loki.crt
|
||||
key_file: /etc/ssl/loki/loki.key
|
||||
@@ -13,7 +13,7 @@ Image=docker.io/grafana/loki:{{ version['containers']['loki'] }}
|
||||
ContainerName=loki
|
||||
HostName=loki
|
||||
|
||||
PublishPort=3100:3100/tcp
|
||||
PublishPort={{ services['loki']['ports']['https'] }}:3100/tcp
|
||||
|
||||
Volume=%h/containers/loki/data:/loki:rw
|
||||
Volume=%h/containers/loki/etc:/etc/loki:ro
|
||||
|
||||
@@ -8,11 +8,11 @@ listen_addresses = '*'
|
||||
# Max connections
|
||||
max_connections = 250
|
||||
# listen_port
|
||||
port = 5432
|
||||
port = {{ services['postgresql']['ports']['tcp'] }}
|
||||
|
||||
# SSL
|
||||
ssl = on
|
||||
ssl_ca_file = '/etc/ssl/postgresql/ilnmors_root_ca.crt'
|
||||
ssl_ca_file = '/etc/ssl/postgresql/{{ root_cert_filename }}'
|
||||
ssl_cert_file = '/etc/ssl/postgresql/postgresql.crt'
|
||||
ssl_key_file = '/etc/ssl/postgresql/postgresql.key'
|
||||
ssl_ciphers = 'HIGH:!aNULL:!MD5'
|
||||
|
||||
@@ -8,12 +8,12 @@ After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Container]
|
||||
Image=ilnmors.internal/{{ node['name'] }}/postgres:pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}
|
||||
Image={{ domain['internal'] }}/{{ node['name'] }}/postgres:pg{{ version['containers']['postgresql'] }}-vectorchord{{ version['containers']['vectorchord'] }}
|
||||
|
||||
ContainerName=postgresql
|
||||
HostName=postgresql
|
||||
|
||||
PublishPort=5432:5432/tcp
|
||||
PublishPort={{ services['postgresql']['ports']['tcp'] }}:5432/tcp
|
||||
|
||||
Volume=%h/containers/postgresql/data:/var/lib/postgresql:rw
|
||||
Volume=%h/containers/postgresql/config:/config:ro
|
||||
|
||||
@@ -23,8 +23,8 @@ scrape_configs:
|
||||
# metrics_path defaults to '/metrics'
|
||||
scheme: "https"
|
||||
tls_config:
|
||||
ca_file: "/etc/ssl/prometheus/ilnmors_root_ca.crt"
|
||||
server_name: "{{ infra_uri['prometheus']['domain'] }}"
|
||||
ca_file: "/etc/ssl/prometheus/{{ root_cert_filename }}"
|
||||
server_name: "{{ services['prometheus']['domain'] }}.{{ domain['internal'] }}"
|
||||
static_configs:
|
||||
- targets: ["localhost:9090"]
|
||||
# The label name is added as a label `label_name=<label_value>` to any timeseries scraped from this config.
|
||||
|
||||
@@ -13,7 +13,7 @@ Image=docker.io/prom/prometheus:{{ version['containers']['prometheus'] }}
|
||||
ContainerName=prometheus
|
||||
HostName=prometheus
|
||||
|
||||
PublishPort=9090:9090/tcp
|
||||
PublishPort={{ services['prometheus']['ports']['https'] }}:9090/tcp
|
||||
|
||||
Volume=%h/containers/prometheus/data:/prometheus:rw
|
||||
Volume=%h/containers/prometheus/etc:/etc/prometheus:ro
|
||||
|
||||
@@ -13,7 +13,7 @@ HostName=X509-exporter
|
||||
|
||||
Volume=%h/containers/x509-exporter/certs:/certs:ro
|
||||
|
||||
PublishPort=9793:9793
|
||||
PublishPort={{ services['x509-exporter']['ports']['http'] }}:9793
|
||||
|
||||
Exec=--listen-address :9793 --watch-dir=/certs
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
//// Metric ouput
|
||||
prometheus.remote_write "prometheus" {
|
||||
endpoint {
|
||||
url = "https://{{ infra_uri['prometheus']['domain'] }}:{{ infra_uri['prometheus']['ports']['https'] }}/api/v1/write"
|
||||
url = "https://{{ services['prometheus']['domain'] }}.{{ domain['internal'] }}:{{ services['prometheus']['ports']['https'] }}/api/v1/write"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -71,8 +71,8 @@ prometheus.scrape "system" {
|
||||
////// For Crowdsec metrics
|
||||
prometheus.scrape "crowdsec" {
|
||||
targets = [
|
||||
{ "__address__" = "{{ infra_uri['crowdsec']['domain'] }}:6060", "job" = "crowdsec" },
|
||||
{ "__address__" = "{{ infra_uri['crowdsec']['domain'] }}:60601", "job" = "crowdsec-bouncer" },
|
||||
{ "__address__" = "{{ services['crowdsec']['domain'] }}.{{ domain['internal'] }}:6060", "job" = "crowdsec" },
|
||||
{ "__address__" = "{{ services['crowdsec']['domain'] }}.{{ domain['internal'] }}:60601", "job" = "crowdsec-bouncer" },
|
||||
]
|
||||
honor_labels = true
|
||||
forward_to = [prometheus.relabel.default_label.receiver]
|
||||
@@ -83,7 +83,7 @@ prometheus.scrape "crowdsec" {
|
||||
////// For postgresql metrics
|
||||
prometheus.exporter.postgres "postgresql" {
|
||||
data_source_names = [
|
||||
"postgres://alloy@{{ infra_uri['postgresql']['domain'] }}:{{ infra_uri['postgresql']['ports']['tcp'] }}/postgres?sslmode=verify-full",
|
||||
"postgres://alloy@{{ services['postgresql']['domain'] }}.{{ domain['internal'] }}:{{ services['postgresql']['ports']['tcp'] }}/postgres?sslmode=verify-full",
|
||||
]
|
||||
}
|
||||
prometheus.scrape "postgresql" {
|
||||
@@ -93,7 +93,7 @@ prometheus.scrape "postgresql" {
|
||||
///// For certificates metrics
|
||||
prometheus.scrape "x509" {
|
||||
targets = [
|
||||
{ "__address__" = "{{ node['name'] }}.ilnmors.internal:9793" },
|
||||
{ "__address__" = "{{ node['name'] }}.{{ domain['internal'] }}:{{ services['x509-exporter']['ports']['http'] }}" },
|
||||
]
|
||||
forward_to = [prometheus.relabel.default_label.receiver]
|
||||
}
|
||||
@@ -103,7 +103,7 @@ prometheus.scrape "x509" {
|
||||
////// For Input Caddy metrics
|
||||
prometheus.scrape "caddy" {
|
||||
targets = [
|
||||
{ "__address__" = "{{ node['name'] }}.ilnmors.internal:443" },
|
||||
{ "__address__" = "{{ node['name'] }}.{{ domain['internal'] }}:443" },
|
||||
]
|
||||
scheme = "https"
|
||||
forward_to = [prometheus.relabel.default_label.receiver]
|
||||
@@ -114,8 +114,8 @@ prometheus.scrape "caddy" {
|
||||
//// Logs output
|
||||
loki.write "loki" {
|
||||
endpoint {
|
||||
url = "https://{{ infra_uri['loki']['domain'] }}:{{ infra_uri['loki']['ports']['https'] }}/loki/api/v1/push"
|
||||
tenant_id = "ilnmors.internal"
|
||||
url = "https://{{ services['loki']['domain'] }}.{{ domain['internal'] }}:{{ services['loki']['ports']['https'] }}/loki/api/v1/push"
|
||||
tenant_id = "{{ domain['internal'] }}"
|
||||
}
|
||||
}
|
||||
//// Logs relabel
|
||||
@@ -203,12 +203,11 @@ loki.relabel "caddy_relabel" {
|
||||
loki.process "journal_parser" {
|
||||
forward_to = [loki.write.loki.receiver]
|
||||
// Severity parsing
|
||||
// If content of log includes "level" information, change the level
|
||||
stage.logfmt {
|
||||
mapping = {
|
||||
"content_level" = "level",
|
||||
}
|
||||
stage.regex {
|
||||
// Regex to extract the log level from the content.
|
||||
expression = "(?i)(?:level[\"\\s:=]+|\\[|\\s|^)(?P<content_level>info|warn|warning|error|debug|fatal|critical|trace)(?:[\"\\]\\s]|$)"
|
||||
}
|
||||
|
||||
stage.labels {
|
||||
values = {
|
||||
"level" = "content_level",
|
||||
|
||||
@@ -8,7 +8,7 @@ log_compression: true
|
||||
log_max_size: 100
|
||||
log_max_backups: 3
|
||||
log_max_age: 30
|
||||
api_url: "https://{{ infra_uri['crowdsec']['domain'] }}:{{ infra_uri['crowdsec']['ports']['https'] }}"
|
||||
api_url: "https://{{ services['crowdsec']['domain'] }}.{{ domain['internal'] }}:{{ services['crowdsec']['ports']['https'] }}"
|
||||
api_key: "{{ hostvars['console']['crowdsec']['bouncer']['fw'] }}"
|
||||
insecure_skip_verify: false
|
||||
disable_ipv6: false
|
||||
|
||||
@@ -1,11 +1,21 @@
|
||||
name: crowdsecurity/whitelists
|
||||
description: "Whitelist console/admin hosts only"
|
||||
description: "Local whitelist policy"
|
||||
whitelist:
|
||||
reason: "trusted admin hosts"
|
||||
reason: "rules"
|
||||
ip:
|
||||
# Console IP lists
|
||||
- "127.0.0.1"
|
||||
- "::1"
|
||||
- "{{ hostvars['fw']['network4']['console']['client'] }}"
|
||||
- "{{ hostvars['fw']['network4']['console']['wg'] }}"
|
||||
- "{{ hostvars['fw']['network6']['console']['client'] }}"
|
||||
- "{{ hostvars['fw']['network6']['console']['wg'] }}"
|
||||
{% if node['name'] == 'auth' %}
|
||||
expression:
|
||||
# budget local-first sql scrap rule
|
||||
- "evt.Meta.target_fqdn == '{{ services['actualbudget']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_path contains '/data/migrations/'"
|
||||
# immich thumbnail request 404 error false positive
|
||||
- "evt.Meta.target_fqdn == '{{ services['immich']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_path contains '/api/assets/' && evt.Meta.http_path contains '/thumbnail'"
|
||||
# opencloud chunk request false positive
|
||||
- "evt.Meta.target_fqdn == '{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_path contains '/js/chunks/'"
|
||||
{% endif %}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
url: https://{{ infra_uri['crowdsec']['domain'] }}:{{ infra_uri['crowdsec']['ports']['https'] }}
|
||||
url: https://{{ services['crowdsec']['domain'] }}.{{ domain['internal'] }}:{{ services['crowdsec']['ports']['https'] }}
|
||||
login: {{ node['name'] }}
|
||||
password: {{ hostvars['console']['crowdsec']['machine'][node['name']] }}
|
||||
|
||||
@@ -21,9 +21,9 @@ ProtectHome=tmpfs
|
||||
InaccessiblePaths=/boot /root
|
||||
|
||||
{% if node['name'] == 'infra' %}
|
||||
BindReadOnlyPaths=/home/infra/containers/postgresql/backups
|
||||
BindReadOnlyPaths=%h/containers/postgresql/backups
|
||||
{% elif node['name'] == 'app' %}
|
||||
BindReadOnlyPaths=/home/app/data
|
||||
BindReadOnlyPaths=%h/data
|
||||
{% endif %}
|
||||
# In root namescope, %u always bring 0
|
||||
BindPaths=/etc/kopia
|
||||
@@ -32,9 +32,9 @@ BindPaths=/var/cache/kopia
|
||||
EnvironmentFile=/etc/secrets/{{ kopia_uid }}/kopia.env
|
||||
|
||||
ExecStartPre=/usr/bin/kopia repository connect server \
|
||||
--url=https://{{ infra_uri['kopia']['domain'] }}:{{ infra_uri['kopia']['ports']['https'] }} \
|
||||
--url=https://{{ services['kopia']['domain'] }}.{{ domain['internal'] }}:{{ services['kopia']['ports']['https'] }} \
|
||||
--override-username={{ node['name'] }} \
|
||||
--override-hostname={{ node['name'] }}.ilnmors.internal
|
||||
--override-hostname={{ node['name'] }}.{{ domain['internal'] }}
|
||||
|
||||
{% if node['name'] == 'infra' %}
|
||||
ExecStart=/usr/bin/kopia snapshot create \
|
||||
|
||||
@@ -12,4 +12,4 @@ StandardError=journal
|
||||
EnvironmentFile=/etc/secrets/%U/ddns.env
|
||||
|
||||
# Run the script
|
||||
ExecStart=/usr/local/bin/ddns.sh -d "ilnmors.com"
|
||||
ExecStart=/usr/local/bin/ddns.sh -d "{{ domain['public'] }}"
|
||||
@@ -19,7 +19,7 @@
|
||||
},
|
||||
{
|
||||
"name": "domain-name",
|
||||
"data": "ilnmors.internal."
|
||||
"data": "{{ domain['internal'] }}."
|
||||
}
|
||||
],
|
||||
"reservations": [
|
||||
@@ -65,7 +65,7 @@
|
||||
},
|
||||
{
|
||||
"name": "domain-name",
|
||||
"data": "ilnmors.internal."
|
||||
"data": "{{ domain['internal'] }}."
|
||||
}
|
||||
],
|
||||
"id": 2,
|
||||
|
||||
0
data/create_all_structure.sh
Normal file → Executable file
0
data/create_all_structure.sh
Normal file → Executable file
@@ -90,7 +90,7 @@ Kea in fw already reserved DSM's IP. However it is necessary to set IP address s
|
||||
## Authelia OIDC
|
||||
|
||||
- **!CAUTION!** It can be set after authelia is implemented
|
||||
- Following [here](../../../config/containers/auth/authelia/config/authelia.yaml.j2) for Authelia configuration
|
||||
- Following [here](../../config/services/containers/auth/authelia/config/authelia.yaml.j2) for Authelia configuration
|
||||
- Control Panel:Domain/LDAP:SSO Client
|
||||
- Login Settings: \[x\] Select SSO by default on the login page
|
||||
- Services
|
||||
@@ -192,9 +192,9 @@ BindPaths=/var/cache/kopia
|
||||
EnvironmentFile=/etc/secrets/{{ kopia_uid }}/kopia.env
|
||||
|
||||
ExecStartPre=/usr/bin/kopia repository connect server \
|
||||
--url=https://{{ infra_uri['kopia']['domain'] }}:{{ infra_uri['kopia']['ports']['https'] }} \
|
||||
--url=https://{{ services['kopia']['domain'] }}.{{ domain['internal'] }}:{{ services['kopia']['ports']['https'] }} \
|
||||
--override-username={{ node['name'] }} \
|
||||
--override-hostname={{ node['name'] }}.ilnmors.internal
|
||||
--override-hostname={{ node['name'] }}.{{ domain['internal'] }}
|
||||
|
||||
ExecStart=/usr/bin/kopia snapshot create \
|
||||
/path/to/backup
|
||||
|
||||
@@ -1,5 +1,26 @@
|
||||
# Git configuration
|
||||
|
||||
## Convention
|
||||
|
||||
- `type(scope): subject`
|
||||
|
||||
- type:
|
||||
- feat: Append the new feature
|
||||
- fix: Fix the bug or errors
|
||||
- docs: Fix the documentations
|
||||
- refactor: Modify code structure without functional changes
|
||||
- perf: Improve the performance
|
||||
- chore: Modify system, package manager, etc configuration
|
||||
- style: Fix code formatting, etc...
|
||||
|
||||
## Commit and tags
|
||||
|
||||
- In this homelab, `[Infra_structure_change]:[Services_change]:[Documents_and_configuration_change]` is the tagging rule.
|
||||
- Tagging and commit should be distinguished.
|
||||
- The change which affects system: tagging
|
||||
- The change which doesn't affect system: commit
|
||||
- `git commit -m "docs(git): define git convention"`
|
||||
|
||||
## Local git
|
||||
|
||||
```bash
|
||||
@@ -29,14 +50,8 @@ git add .
|
||||
# Check git changes
|
||||
git status
|
||||
git commit -m "1.0.0: Release IaaS baseline"
|
||||
# git commit -m "docs: update 07-git.md to add the way to manage git system"
|
||||
# Make current documents as snapshot
|
||||
git tag -a 1.0.0 -m "IaaS baseline"
|
||||
# Make special changes
|
||||
# In this homelab, [Infra_structure_change]:[Services_change]:[Documents_and_configuration_change]
|
||||
# Tagging and commit should be distinguished.
|
||||
# The change which affects system: tagging
|
||||
# The change which doesn't affect system: commit
|
||||
|
||||
|
||||
# Commands
|
||||
git status # What files are changed
|
||||
@@ -69,3 +84,27 @@ git tag -a 0.0.1-caddy-app -m "caddy-app: Start caddy-app branch"
|
||||
git switch main
|
||||
git merge caddy-app
|
||||
```
|
||||
|
||||
## Connect local git and remote git
|
||||
|
||||
- Set this after gitea is implemented
|
||||
|
||||
```bash
|
||||
# Add git remote repository
|
||||
git config --global credential.helper store
|
||||
git remote add origin https://gitea.ilnmors.com/il/ilnmors-homelab.git
|
||||
# For first time to make connection between local and remote git
|
||||
git push -u origin main
|
||||
# Username for 'https://gitea.ilnmors.com': il
|
||||
# Password for 'https://il@gitea.ilnmors.com': gitea.il.token
|
||||
git push --tags
|
||||
# After first connection, -u origin main option is not needed
|
||||
git add $PATH
|
||||
git commit -m "comment"
|
||||
git tag -a $VERSION -m "comment"
|
||||
git push && git push --tags
|
||||
# -f and --force-with-lease can be used in commit and tags
|
||||
# -f option and --force-with-lease
|
||||
# -f: just overwrite forcefully without any condition. it is dangerous, because it doesn't care the changes (commit) from other local git
|
||||
# --force-with-lease: overwrite forcefully only when there's no changes (commit) from other local git
|
||||
```
|
||||
|
||||
22
docs/services/app/actual-budget.md
Normal file
22
docs/services/app/actual-budget.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# Actual budget
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'actualbudget.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `actualbudget.oidc.secret` and `actualbudget.oidc.hash`
|
||||
|
||||
## Configuration
|
||||
|
||||
### Initialization
|
||||
|
||||
- Use current domain
|
||||
- ok
|
||||
- Start Using OpenID
|
||||
- Start fresh
|
||||
- Server Online: User directory: Add User: the user name which is defined on LDAP
|
||||
- Server Online: User Access: enable user
|
||||
122
docs/services/app/affine.md
Normal file
122
docs/services/app/affine.md
Normal file
@@ -0,0 +1,122 @@
|
||||
# affine
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.affine`
|
||||
- Access infra server to create affine_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER affine WITH PASSWORD 'postgresql.password.affine';
|
||||
CREATE DATABASE affine_db;
|
||||
ALTER DATABASE affine_db OWNER TO affine;
|
||||
\connect affine_db
|
||||
CREATE EXTENSION IF NOT EXISTS vector;
|
||||
\dx
|
||||
-- Check the extension is activated with `\dx`
|
||||
-- postgresql image is built with `pgvector` and `vectorchord` already
|
||||
```
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'affine.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `affine.oidc.secret` and `affine.oidc.hash`
|
||||
|
||||
### Create secret key value
|
||||
|
||||
- Create the secret with `openssl genpkey -algorithm ed25519 -outform PEM`
|
||||
- Save this value in secrets.yaml in `affine.secret_key`
|
||||
|
||||
### Create admin password
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `affine.il.password`
|
||||
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "affine"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### About community edition limitation
|
||||
|
||||
- Workspace seats
|
||||
- The number of members itself \(account\) are unlimited.
|
||||
- However the number of members who work on the same workspace simultaneously \(seats\) are designated as 10 members.
|
||||
- Workspace storage quota
|
||||
- Originally, self-hosted version has no limitation in storage quota and uploading file size.
|
||||
- Now, there is some limitation even in the self-hosted version.
|
||||
- It will be changed when the application is updating
|
||||
|
||||
### Following feature which will be applied in this system
|
||||
|
||||
- Linking local caldav vaikal or radicale ...
|
||||
- Apply AI function with API
|
||||
|
||||
### Access to affine
|
||||
|
||||
- https://affine.ilnmors.com
|
||||
- Getting started
|
||||
- admin name
|
||||
- admin E-mail
|
||||
- admin password
|
||||
- Initial setting allows only 32 digit password, now just set temporary password
|
||||
|
||||
### Server configuration
|
||||
- https://affine.ilnmors.com/admin
|
||||
|
||||
#### Server
|
||||
|
||||
- A recognizable name for the server. Will be shown when connected with AFFiNE Desktop.
|
||||
- Ilnmors
|
||||
|
||||
#### Auth
|
||||
|
||||
- [ ] Whether allow new registrations
|
||||
- [x] Whether allow new registration via configured oauth
|
||||
- Minimum length requirement of password: 8
|
||||
- Maximum length requirement of password: 50
|
||||
- save
|
||||
|
||||
#### Oauth configuration
|
||||
|
||||
```ini
|
||||
# These options are required
|
||||
## OIDC callback URIs
|
||||
Environment="AFFINE_SERVER_HOST={{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="AFFINE_SERVER_EXTERNAL_URL=https://{{ services['affine']['domain']['public'] }}.{{ domain['public'] }}"
|
||||
Environment="AFFINE_SERVER_HTTPS=true"
|
||||
```
|
||||
- OIDC Oauth provider config
|
||||
|
||||
```json
|
||||
{
|
||||
"clientId":"affine",
|
||||
"clientSecret":"affine.oidc.secret",
|
||||
"issuer":"https://authelia.ilnmors.com",
|
||||
"args":{
|
||||
"scope": "openid profile email"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- save
|
||||
|
||||
#### Flags
|
||||
|
||||
- [ ] Whether allow guest users to create demo workspaces
|
||||
- save
|
||||
|
||||
93
docs/services/app/gitea.md
Normal file
93
docs/services/app/gitea.md
Normal file
@@ -0,0 +1,93 @@
|
||||
# gitea
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.gitea`
|
||||
- Access infra server to create gitea_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER gitea WITH PASSWORD 'postgresql.password.gitea';
|
||||
CREATE DATABASE gitea_db;
|
||||
ALTER DATABASE gitea_db OWNER TO gitea;
|
||||
```
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'gitea.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `gitea.oidc.secret` and `gitea.oidc.hash`
|
||||
|
||||
### Create admin password
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `gitea.admin.password`
|
||||
-
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "gitea"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Site installation
|
||||
|
||||
- **!CAUTION!** This is needed Only when first installing. This process isn't needed when the data is restored on `~/data/containers/gitea`
|
||||
- https://gitea.ilnmors.com
|
||||
- General setting
|
||||
- Site Title: Gitea: ilnmors
|
||||
- Administrator Account Setting
|
||||
- Administrator Username: il
|
||||
- Email Address il@ilnmors.internal
|
||||
- Password & confirm password: gitea.il.password
|
||||
- Install Gitea
|
||||
|
||||
### Oauth configuration
|
||||
|
||||
- Site Administration
|
||||
- Identity & Access: Authentication Sources: Add Authentication Source
|
||||
- Configure the following options:
|
||||
- Authentication Type: OAuth2
|
||||
- Authentication Name: authelia
|
||||
- OAuth2 Provider: OpenID Connect
|
||||
- Client ID (Key): gitea
|
||||
- Client Secret: gitea.oidc.secret
|
||||
- OpenID Connect Auto Discovery URL: https://authelia.ilnmors.com/.well-known/openid-configuration
|
||||
|
||||
### Link Account via OAuth2
|
||||
|
||||
- Login with autheila
|
||||
- Register New Account
|
||||
- Link to Existing Account
|
||||
- Link the account of administrational account which set at the inital install page
|
||||
- id and password
|
||||
|
||||
### Set repository access token
|
||||
|
||||
- Settings: Applications: Generate New Token
|
||||
- Token name: ilnmors-homelab
|
||||
- Repository and Organization Access: All
|
||||
- repository - Read and Write
|
||||
- Generate Token
|
||||
- Copy token value and save this in `secrets.yaml`
|
||||
|
||||
## Connect with loacl git repository
|
||||
|
||||
### Create remote git repository
|
||||
|
||||
- Repository: `+`
|
||||
- Name: ilnmors-homelab
|
||||
- etc: default vaules
|
||||
|
||||
- Following [07-git.md](../../runbook/07-git.md)
|
||||
@@ -9,4 +9,4 @@ After reboot, check the render device.
|
||||
```bash
|
||||
ls -l /dev/dri
|
||||
# crw-rw---- 1 root video 226, 0 ... card0
|
||||
# crw-rw---- 1 root render 226, 128 ... renderD128
|
||||
# crw-rw---- 1 root render 226, 128 ... renderD128
|
||||
|
||||
86
docs/services/app/immich.md
Normal file
86
docs/services/app/immich.md
Normal file
@@ -0,0 +1,86 @@
|
||||
# immich
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.immich`
|
||||
- Access infra server to create immich_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER immich WITH PASSWORD 'postgresql.password.immich';
|
||||
CREATE DATABASE immich_db;
|
||||
ALTER DATABASE immich_db OWNER TO immich;
|
||||
\connect immich_db
|
||||
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
|
||||
CREATE EXTENSION IF NOT EXISTS cube CASCADE;
|
||||
CREATE EXTENSION IF NOT EXISTS earthdistance CASCADE;
|
||||
\dx
|
||||
-- Check the extension is activated with `\dx`
|
||||
-- postgresql image is built with `pgvector` and `vectorchord` already
|
||||
```
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'immich.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `immich.oidc.secret` and `immich.oidc.hash`
|
||||
|
||||
### Create admin password
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `immich.il.password`
|
||||
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "immich"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Access to immich
|
||||
|
||||
- https://immich.ilnmors.com
|
||||
- Getting started
|
||||
- admin E-mail
|
||||
- admin password
|
||||
- admin name
|
||||
- Theme
|
||||
- language
|
||||
- Server privacy
|
||||
- map
|
||||
- version check
|
||||
- User privacy
|
||||
- google cast \(disable\)
|
||||
- Storage template
|
||||
- `{{y}}/{{MM}}/{{y}}{{MM}}{{dd}}_{{hh}}{{mm}}{{ss}}`
|
||||
- Backups
|
||||
- Mobile App
|
||||
- Done
|
||||
|
||||
### Oauth configuration
|
||||
|
||||
- Administartion: Authentication Settings: OAuth: Enable
|
||||
- Issuer URL: https://auth.example.com/.well-known/openid-configuration
|
||||
- Client ID: immich
|
||||
- Client Secret: immich.oidc.secret
|
||||
- Scope: openid profile email
|
||||
- Button Text: Login with Authelia
|
||||
- Auto Register: Enable if desired
|
||||
|
||||
### Machine learning configuration
|
||||
|
||||
- Administration: Machine Learning Settings: Enable
|
||||
- URL: http://host.containers.internal:3003
|
||||
- **!CAUTION!**
|
||||
- immich-ml should contain `-openvino` to use GPU for machine learning.
|
||||
25
docs/services/app/opencloud.md
Normal file
25
docs/services/app/opencloud.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# opencloud
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### oidc secret and hash
|
||||
|
||||
- OpenCloud uses PKCE \(Proof Key for Code Exchange\), therefore it doesn't need a client secret
|
||||
|
||||
### Create admin password
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `opencloud.admin.password`
|
||||
|
||||
## Configuration
|
||||
|
||||
- **!CAUTION!** OpenCloud application \(Android, IOS, Desktop\) doesn't support standard OIDC. Every scopes and client id is hardcoded.
|
||||
- WEBFINGER_\[DESKTOP|ANDROID|IOS\]_OIDC_CLIENT_ID, WEBFINGER_\[DESKTOP|ANDROID|IOS\]_OIDC_CLIENT_SCOPES don't work on official app.
|
||||
- It is impossible to set the group claim in the scopes. Therefore, it is hard to control roles with a token that includes the group claim.
|
||||
- When Authelia doesn't work, comment out `OC_EXCLUDE_RUN_SERVICES=idp` and restart the container to use the local admin.
|
||||
- This app doesn't support regex on role_assignment mapping.
|
||||
- When a new user is added, manage proxy.yaml.j2 manually until regex or fallback mapping is supported, or the hardcoded scopes in the applications are fixed.
|
||||
|
||||
### csp
|
||||
|
||||
- Fix `csp.yaml`
|
||||
116
docs/services/app/paperless-ngx.md
Normal file
116
docs/services/app/paperless-ngx.md
Normal file
@@ -0,0 +1,116 @@
|
||||
# paperless
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.paperless`
|
||||
- Access infra server to create paperless_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER paperless WITH PASSWORD 'postgresql.password.paperless';
|
||||
CREATE DATABASE paperless_db;
|
||||
ALTER DATABASE paperless_db OWNER TO paperless;
|
||||
```
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'paperless.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `paperless.oidc.secret` and `paperless.oidc.hash`
|
||||
- Use `client_secret_post`; Django frequently encodes the secret value incorrectly.
|
||||
|
||||
### Create session secret value
|
||||
|
||||
- Create the secret with `LC_ALL=C tr -dc 'A-Za-z0-9!#%&()*+,-./:;<=>?@[\]^_{|}~' </dev/urandom | head -c 32`
|
||||
- Save this value in secrets.yaml in `paperless.session_secret`
|
||||
|
||||
### Create admin password
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `paperless.il.password`
|
||||
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "paperless"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Access to paperless
|
||||
|
||||
- https://paperless.ilnmors.com
|
||||
- name: il
|
||||
- E-mail: il@ilnmors.internal
|
||||
- password: `paperless.il.password`
|
||||
|
||||
### Oauth configuration
|
||||
|
||||
- My Profiles: Connect new social account: Authelia
|
||||
- Continue
|
||||
- Login with Authelia
|
||||
|
||||
### OCR configuration
|
||||
|
||||
- Configuration: OCR settings
|
||||
- Output Type: pdfa
|
||||
- Mode: skip
|
||||
- When the archive file has broken OCR text, run the replace command manually
|
||||
- Skip archive File: never
|
||||
- Deskew: disable \(toggle to enable and once more to activate the disable option\)
|
||||
- rotate: disable \(toggle to enable and once more to activate the disable option\)
|
||||
|
||||
## The non-standard pdf file
|
||||
|
||||
- Some PDF files don't follow the standard, for example Korean court or government PDF files.
|
||||
- Before uploading this kind of non-standard PDF file, convert it first.
|
||||
- This process uses Ghostscript and PowerShell on the Windows console
|
||||
|
||||
```PowerShell
|
||||
# 1. The engine
|
||||
$gsPath = "C:\Program Files\gs\gs10.07.0\bin\gswin64c.exe"
|
||||
|
||||
# 2. new folder which the converted file will be stored
|
||||
$outputDirName = "converted_pdfs"
|
||||
$outputDir = Join-Path (Get-Location) $outputDirName
|
||||
if (!(Test-Path $outputDir)) { New-Item -ItemType Directory -Path $outputDir }
|
||||
|
||||
# 3. Find all pdf files
|
||||
$files = Get-ChildItem -Filter *.pdf
|
||||
|
||||
foreach ($file in $files) {
|
||||
if ($file.FullName -like "*$outputDirName*") { continue }
|
||||
|
||||
$inputPath = $file.FullName
|
||||
$outputPath = Join-Path $outputDir $file.Name
|
||||
|
||||
Write-Host "convert: $($file.Name)" -ForegroundColor Cyan
|
||||
|
||||
$gsArgs = @(
|
||||
"-sDEVICE=pdfwrite",
|
||||
"-dCompatibilityLevel=1.4",
|
||||
"-dPDFSETTINGS=/default",
|
||||
"-dNOPAUSE",
|
||||
"-dQUIET",
|
||||
"-dBATCH",
|
||||
"-dNoOutputFonts", # Rasterize all text as images
|
||||
"-sOutputFile=$outputPath",
|
||||
"$inputPath"
|
||||
)
|
||||
|
||||
# Execute Ghostscript
|
||||
& $gsPath @gsArgs
|
||||
}
|
||||
|
||||
Write-Host "`n[Complete] All files are stored in '$outputDirName'." -ForegroundColor Green
|
||||
```
|
||||
49
docs/services/app/vaultwarden.md
Normal file
49
docs/services/app/vaultwarden.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# vaultwarden
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.vaultwarden`
|
||||
- Access infra server to create vaultwarden_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER vaultwarden WITH PASSWORD 'postgresql.password.vaultwarden';
|
||||
CREATE DATABASE vaultwarden_db;
|
||||
ALTER DATABASE vaultwarden_db OWNER TO vaultwarden;
|
||||
```
|
||||
|
||||
### Create admin hash
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- https://argon2.online/
|
||||
- salt: auto generate
|
||||
- parallelism: factor 4
|
||||
- memory cost: 65536
|
||||
- iterations: 3
|
||||
- hash length: 32
|
||||
- type: argon2id
|
||||
- Save this value in secrets.yaml in `vaultwarden.admin.password` and `vaultwarden.admin.hash`
|
||||
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "vaultwarden"
|
||||
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
- https://vault.ilnmors.com/admin
|
||||
- token value: vaultwarden.admin.password
|
||||
- Users:Invite User:Email
|
||||
- add
|
||||
- https://vault.ilnmors.com
|
||||
- Create account and input the Email which added in admin page
|
||||
62
docs/services/app/vikunja.md
Normal file
62
docs/services/app/vikunja.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# vikunja
|
||||
|
||||
## Prerequisite
|
||||
|
||||
### Create database
|
||||
|
||||
- Create the password with `openssl rand -base64 32`
|
||||
- Save this value in secrets.yaml in `postgresql.password.vikunja`
|
||||
- Access infra server to create vikunja_db with `podman exec -it postgresql psql -U postgres`
|
||||
|
||||
```SQL
|
||||
CREATE USER vikunja WITH PASSWORD 'postgresql.password.vikunja';
|
||||
CREATE DATABASE vikunja_db;
|
||||
ALTER DATABASE vikunja_db OWNER TO vikunja;
|
||||
```
|
||||
|
||||
### Create oidc secret and hash
|
||||
|
||||
- Create the secret with `openssl rand -base64 32`
|
||||
- access to auth vm
|
||||
- `podman exec -it authelia sh`
|
||||
- `authelia crypto hash generate pbkdf2 --password 'vikunja.oidc.secret'`
|
||||
- Save this value in secrets.yaml in `vikunja.oidc.secret` and `vikunja.oidc.hash`
|
||||
|
||||
### Create session secret value
|
||||
|
||||
- Create the secret with `LC_ALL=C tr -dc 'A-Za-z0-9!#%&()*+,-./:;<=>?@[\]^_{|}~' </dev/urandom | head -c 32`
|
||||
- Save this value in secrets.yaml in `vikunja.session_secret`
|
||||
|
||||
### Add postgresql dump backup list
|
||||
|
||||
- [set_postgresql.yaml](../../../ansible/roles/infra/tasks/services/set_postgresql.yaml)
|
||||
|
||||
```yaml
|
||||
- name: Set connected services list
|
||||
ansible.builtin.set_fact:
|
||||
connected_services:
|
||||
- ...
|
||||
- "vikunja"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
- !NOTE!
|
||||
- Vikunja doesn't support a local + OIDC dual-login environment. Don't use a local account.
|
||||
- Just use oidc login.
|
||||
- Ignore the process below.
|
||||
|
||||
### Create local account
|
||||
|
||||
- Access to app vm via ssh
|
||||
|
||||
```bash
|
||||
ssh app
|
||||
podman exec -it vikunja /app/vikunja/vikunja user create --email il@ilnmors.internal --username il
|
||||
# Enter Password: vikunja.il.password
|
||||
# Confirm Password: vikunja.il.password
|
||||
```
|
||||
|
||||
- https://vikunja.ilnmors.com
|
||||
- Try to login locally
|
||||
- Try to login via Authelia
|
||||
@@ -28,17 +28,23 @@ test.ilnmors.com
|
||||
import crowdsec_log
|
||||
route {
|
||||
crowdsec
|
||||
reverse_proxy https://test.app.ilnmors.internal
|
||||
reverse_proxy https://test.app.ilnmors.internal {
|
||||
header_up Host {http.reverse_proxy.upstream.host}
|
||||
}
|
||||
}
|
||||
}
|
||||
# App server
|
||||
{
|
||||
servers {
|
||||
trusted_proxies static {{ hostvars['fw']['network4']['auth']['server'] }} {{ hostvars['fw']['network6']['auth']['server'] }}
|
||||
}
|
||||
}
|
||||
test.app.ilnmors.internal
|
||||
{
|
||||
import internal_tls
|
||||
trusted_proxies {{ hostvars['fw']['network4']['auth']['server'] }} {{ hostvars['fw']['network6']['auth']['server'] }}
|
||||
route {
|
||||
reverse_proxy host.containers.internal:3000 {
|
||||
header_up Host {header.X-Forwarded-Host} {Host}
|
||||
header_up Host {http.request.header.X-Forwarded-Host}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -228,6 +228,23 @@ auth@auth:~$ sudo cscli alerts list
|
||||
│ 9 │ Ip:138.68.144.227 │ crowdsecurity/jira_cve-2021-26086 │ │ │ ban:1 │ 2026-03-06 04:18:18.35776077 +0000 UTC │
|
||||
╰────┴────────────────────┴───────────────────────────────────┴─────────┴────┴───────────┴─────────────────────────────────────────╯
|
||||
|
||||
## Log check and inspect
|
||||
|
||||
fw@fw:~$ sudo cscli alerts inspect 230 -d
|
||||
|
||||
- Check the log, analyze it, and build a filter expression
|
||||
- e.g. immich
|
||||
- evt.Meta.target_fqdn == 'immich.ilnmors.com' && evt.Meta.http_path contains '/api/assets/' && evt.Meta.http_path contains '/thumbnail'
|
||||
- e.g. opencloud
|
||||
- "evt.Meta.target_fqdn == '{{ services['opencloud']['domain']['public'] }}.{{ domain['public'] }}' && evt.Meta.http_path contains '/js/chunks/'"
|
||||
- Remove the false-positive decision
|
||||
|
||||
fw@fw:~$ sudo cscli decision list
|
||||
╭─────────┬──────────┬───────────────────┬──────────────────────────────────────┬────────┬─────────┬────────────────────────┬────────┬────────────────────┬──────────╮
|
||||
│ ID │ Source │ Scope:Value │ Reason │ Action │ Country │ AS │ Events │ expiration │ Alert ID │
|
||||
├─────────┼──────────┼───────────────────┼──────────────────────────────────────┼────────┼─────────┼────────────────────────┼────────┼────────────────────┼──────────┤
|
||||
│ 5280078 │ crowdsec │ Ip:223.195.50.112 │ crowdsecurity/http-crawl-non_statics │ ban │ KR │ 9769 Sejong University │ 43 │ 3h42m21.824049012s │ 430 │
|
||||
╰─────────┴──────────┴───────────────────┴──────────────────────────────────────┴────────┴─────────┴────────────────────────┴────────┴────────────────────┴──────────╯
|
||||
fw@fw:~$ sudo cscli decision delete --id 5280078
|
||||
INFO[04-04-2026 09:55:02] 1 decision(s) deleted
|
||||
|
||||
|
||||
@@ -74,17 +74,16 @@
|
||||
- [x] Prometheus \(alloy - push\)
|
||||
- [x] Loki \(alloy\)
|
||||
- [x] Grafana
|
||||
<!--
|
||||
<!--
|
||||
Mail service is not needed, especially Diun is not needed.
|
||||
- Postfix
|
||||
- Dovecot
|
||||
- mbsync
|
||||
- Diun
|
||||
-->
|
||||
- Study \(Rootless container\):
|
||||
- Kali
|
||||
- Debian
|
||||
|
||||
-->
|
||||
|
||||
## auth \(Authorization\)
|
||||
|
||||
@@ -115,30 +114,31 @@
|
||||
- Disk: SATA Controller
|
||||
- Services:
|
||||
- OIDC native services:
|
||||
- OpenCloud \(with Radicale, Collabora Web Office\)
|
||||
- Vikunja \(with CalDAV\)
|
||||
- Gitea
|
||||
- Outline
|
||||
- Wiki.js
|
||||
- [x] Vaultwarden
|
||||
- [x] Gitea
|
||||
- [x] Immich
|
||||
- [x] Actual budget
|
||||
- [x] Paperless-ngx
|
||||
- [x] vikunja - When affine is verified to substitute kanban board and etc, then disable this service.
|
||||
- [x] OpenCloud
|
||||
- [x] affine \(Notion substitution\)
|
||||
- [ ] Radicale
|
||||
- [ ] Collabora office
|
||||
- WriteFreely
|
||||
- Immich
|
||||
- MediaCMS
|
||||
- Funkwhale
|
||||
- Kavita
|
||||
- Audiobookshelf
|
||||
- we-promise/sure - budget
|
||||
- Paperless-ngx
|
||||
- Miniflux
|
||||
- Linkwarden
|
||||
- Ralph
|
||||
- Conduit
|
||||
- SnappyMail
|
||||
- Vaultwarden
|
||||
<!--
|
||||
<!--
|
||||
- n8n
|
||||
-->
|
||||
- Forward_auth
|
||||
- Homepage
|
||||
-->
|
||||
|
||||
## External Backup server
|
||||
|
||||
|
||||
Reference in New Issue
Block a user