Initial commit

2025-03-22 14:29:48 +01:00
commit da7781035d
87 changed files with 3160 additions and 0 deletions

12
.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
# Ignore terraform state/vars/lock files
terraform.tfstate
terraform.tfstate.backup
.terraform.lock.hcl
.terraform
*.tfvars
# Ignore output kubeconfig (contains secrets)
kubeconfig
# Ignore docker environment files (contains secrets)
.env

9
.post-commit-sops.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Post-commit hook: decrypt the values/secrets files that were just committed,
# so the working tree keeps plaintext after the pre-commit hook encrypted them.
readarray -t f < <(git diff-tree --no-commit-id --name-only HEAD -r)
for filepath in "${f[@]}"; do
filename=$(basename "${filepath}")
if [[ "${filename}" =~ values\.ya?ml$ ]] || [[ "${filename}" =~ secrets?\.ya?ml$ ]]; then
sops -d -i "${filepath}"
fi
done
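
Git does not pick this script up on its own; it has to be wired in as the clone's post-commit hook, for example by symlinking it into `.git/hooks` (a minimal sketch, assuming the repository root as the working directory — the hook location is git's convention, not something defined in this commit):

```bash
ln -sf ../../.post-commit-sops.sh .git/hooks/post-commit
# After that, a normal commit round-trips: pre-commit encrypts the staged
# values/secrets files, and this hook decrypts them again in the working tree.
```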

8
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,8 @@
repos:
- repo: local
hooks:
- id: encrypt-files
name: encrypt files
entry: .pre-commit-sops.sh
language: script
pass_filenames: true
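
This registers `.pre-commit-sops.sh` as a local hook, so the pre-commit framework has to be installed into each clone for it to fire. A minimal sketch:

```bash
pre-commit install                        # writes .git/hooks/pre-commit for this clone
# Running the hook manually encrypts matching files in place, so use with care:
pre-commit run encrypt-files --all-files
```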

7
.pre-commit-sops.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Pre-commit hook: encrypt any staged values/secrets files in place with sops,
# then re-stage them so only ciphertext ends up in the commit.
for filename in "$@"; do
if [[ "${filename}" =~ values\.ya?ml$ ]] || [[ "${filename}" =~ secrets?\.ya?ml$ ]]; then
sops -e -i "${filename}"
git add "${filename}"
fi
done

63
.sops.pub.asc Normal file
View File

@@ -0,0 +1,63 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBGfOH3UBEACsB2EhJ/W8loVJTXK5XhoPovRRQ4fD4ntZR+mbw3n7JCdQDWOW
A0YXkZ0nLk5RYKxVMeHusTfUbkDt9wyCX2X2kWHpwEVmupJszCFiNeYQlzNRqMfN
ld8iS/fJR6Gh2etU9iWJqrFJkhiwRzNMiDdYJ5nDn+8WozzyFOg4GiYn78OQ1tan
FYz11MfGQBvt07y6OS2ZpNBBiWsNf1UA2HVDunIQGMyEpAX7sH7QpbPOFhcWMBiC
CQcbdvJNrVeTxyVJaj+mgndvVh5Vr6WW6vuCfAdnhCF/rvRKnpo2Prf3eWzTWBy0
JuOQpAb8zp2coxdzPSMZg2f6n/uSZ9+8vatPeY4dc49Mf/5nSZu1hAQDw8+bz+l4
2E7cinESNLF1b4DtW21DPbQab/0Q5tyJGplwZWbUfO7Q7lo7y+Nw1/uq2npOHZ2m
ohRGbtEHHVlWffIdS08pciWcSoTKIviyaS6y1bioJXcidGlQPiKFzw398toIzji8
+8npm6TmhDkxjnfNiGIJCgw0GtgjiBnazCbQ9aUF+RhSciG3+BrU4p/E1wygg75Y
xgZysYWY2OujCLsTdgR5IXC28FzvYxax56QEO2gJGmHYvGyqWen6yKywgO3JtLmc
yCQz6t4/JDg2kY0MGrKjOgislFAA1RJBpu+q0ylq/uRZh+JiknFcN4msNQARAQAB
tCprdWJlLXRhbG9zLmxvY2FsIChrdWJlLXRhbG9zIGZsdXggc2VjcmV0cymJAlIE
EwEIADwWIQTcaRAmjmV/9wun7CiZdElOdpON3AUCZ84fdQMbLwQFCwkIBwICIgIG
FQoJCAsCBBYCAwECHgcCF4AACgkQmXRJTnaTjdyemRAApn7E0pUmY8asD2iKrl9O
Y+OZinidREuUQyp4UxrQfbZIeS1nqYfTw70+n/PPt1uuXZwmrJIGMuj91gV7VXiU
38Xlm5R9/YeGJJdBGSfqd2woMvU/2BeHou+JTcNoazeYkNkp6UmrqEnYenI1suKe
LZKLD/dEXFXTwW5iwAowvTlD08ARMXU+pZUnUHUDaGeUQSWRKxAZk0ClcclgtevC
Y+LWDcSpY4l0b15sb7gZhwcX7ZHfvYmH3Owm81Kyoqw495y8xIMaVD15Q1XCU6P0
gSKZNS2OFoQWG/63sNjXaIufXddnP4qHdbJGrDMz6MzOoWlR/qFV7kGwrj/k+Rm8
N7Gg2iLxQ/H5SBWngXvbUqnMoqIavPXlkZKppQtoZYRcXPdlgimCHWpnslKGskLH
gR972eQCp0yFel3xe5oY+Hc3Wrl0Lw5F51IHqlthpUDVZ0gZ/yFmOb1d3BgmySgb
CyYfGV+pbAonk0qGlENGUXFJlNc+2l9O4P/efd9LD2bvZwBdpaa18zZz04EBJP+U
ZOOcevbTf18DH93kgH5QPrszs5HjdtsldVNxBmBxWVSbCTTaZzXXVtg9mDQ6bg5O
qHbk2O2OK6lEpAV9cfsRNlfJ1mp9crMQ8G3K4BLHCLEXrNO+BwwAAzyhByVBo6pO
GDXOAs4Ny+aZzCoc7Fk4fwO5Ag0EZ84fdQEQAMRqcUho5e5gT3zgdiPBFzji0wWU
Zt8HDlJtAdING9FGe92graMbCts2mlJ5jDlbG241S/xha4HklpA7t5Fp3RxA5u3p
jJtalpU1J5WrbpMc70Twja2PDs8FHl1DDXRLepowLA7uboT9Oa/rceVy+BupZaYL
c7bGyZLQkXqirD7SMNggJLsRyWwbaFju4hGRYfPrSwSXs1rCQfohKwxsUXI7Ra6J
xSGD5T10zcVGgqIEAb+qpFGxrT1+QJ5Hfd6EUJ8MFN4aADBY+/HXp7c3zW2jB9hv
VkDggBimsL4nbUSo6er4b5Ses4/rIy3EkFk6ELjSx7zwt7MyXWcj9I2HGZa022JO
bAjbUZLuM9rmhKUtGyyBtjMRC0VH6MvbuUQViUQv52V15kWngHO895fhr7CgJ9gS
MhoFCtFtgSoGqZPJW/OhDNh4S8Sr9La+hvCkqJTF6hzf5yKEt1+bEW8Q6sH60WCh
NfbXTMBpS8iJ0owRBexfpkYssWhN+C/x6x1f27hrxvxLTxh4i+X/xqG1X8xsxbm4
VPz6kmMXLEHQuP+FxFUkmeOEK2bX2ycIZr8gKZ+IC/eqs+i0L1WfdXdaaAYniMm7
3RqYIOc9PSxAR8P0XtxevhZwEEfRZaEjigPrpAiGEx/lv25ZyRIfXOnj2i7tsrEQ
Ra4He5S9NhRqhVB5ABEBAAGJBGwEGAEIACAWIQTcaRAmjmV/9wun7CiZdElOdpON
3AUCZ84fdQIbLgJACRCZdElOdpON3MF0IAQZAQgAHRYhBKccy+7fMJ6dyo/fnbuy
4qQr71wiBQJnzh91AAoJELuy4qQr71wieLAQAI4Rxg3u5r+IYr5+SM+kDqZIuzNs
dlKxjyJFAJC6E7ewm3KxI1GhwHaHhpAeyFrTB9URqR8z7dM6yF7kAvMAUv+JYuRj
XUKxIAMiZeHOGJh0Bwmk26kzYl/zKIWAdfLHFaeyWiOJg1TEuMhdMZjEEKdjUCQn
DSjzi5XEgbQ4RcedkTV4XuXLJcr6SpKBCtr81DovjDBKiK1eAq8yZjlhct/MfZuD
pffqU3s4af1LKVGCHbi8aF0G07AziUpP6TBMW8bv7BCKKFklDUSsx+F4DzKk79Uj
X6OLXdsx4gCoGTqmaji0ql/zoW1n10gxLfgOgcsf7RnlkOkez/RaxVeNA+yuPy7Y
/iX4ZlinbIie+fzmpc+3+zMPNpIn7coM8PUvl3A2ml1PLDaBvHTQGLM6ZTjc+l4A
u21U7YbY1Blc0o8wDKy8EOV7EDcsK5DxWXHCpm+cn/xtLnKjEA+I0H7BjVuqgrnc
NEV6ttPJaiAXuTBTHGhDpWXn8ZjawYUxr9rrQcspXxOm7SkvZqoEPrgF0gxxu8ti
19DVJ67mSadSQWsnfPhbH+RRF1HpgRQV34vM2l25+w+kpTRLab7ub49oEQUdGuwp
9U3VerNxLtKIDeYOZemV130rZ3NMFatva3SocEjXMgtqehudGB3oyc2J1tPbELKH
WaAtZir/+9kYjpmJN5sQAKsgYu9+dFXarhGD9yTp8nDREf9/RkqVLFyzDkxxk9Qs
WF/cMd+5CB5f0QbX20vYSTsCJU/Pz/IMQwYJxk+hUo9quKkVvBw5Ep7jFnXLSYXZ
17hXmIYUoLuM+9xdws4to7X5k6fhgvoqimHbc3pG0RTk35MIhrGgHqVJ6uhOYJhX
xxhIN5XAfZdw5BIn9JESi451ix970KGnsK5tMFGrA5R4gcshRDAzUvMFp61NB79D
lw4kNrG1Rwd03LZQK+9GrtHhB4HN1jKexLYi/GcUkgX2xwmrgdmkZlkHeecIUSIt
lwUwHR7CbdTd38ziQuP/DGZ+EFs/4n1m9JgNwqXeMwo2XhxBcPZwPuS7PUvwcfoH
a+0GT+opAxzbNz+eGUuyQkMQy13sLa3BkTnSXlrrjotSmEeVLwNzWyYxaZe96BPQ
wEHWJZPjMGvtRfyuyyQC0Kw1UDsS120xlniATP+Dr2FEqFBoYA5JJa2/dBi/jb7n
W+LMoB5MVNiDryEGXUIoFdP5Zjk80/Prau07gdgrsiPG8Dg/WrcJMouQONUk0zSD
Ve8RAegkeBqjz60/CEhIVCQ/G6flfu0mxEpCJLe/fPWcVrhDZz+ah6AZbdb7Yos0
NMykcWY+U5rGdX6S9JNaE9o7NXyascCl070NbiO3dN4Hi0hunSX98NWCy0RzElGM
=/ubk
-----END PGP PUBLIC KEY BLOCK-----

7
.sops.yaml Normal file
View File

@@ -0,0 +1,7 @@
creation_rules:
- path_regex: .*values.yaml$
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
pgp: DC6910268E657FF70BA7EC289974494E76938DDC
- path_regex: .*.yaml
encrypted_regex: ^(data|stringData)$
pgp: DC6910268E657FF70BA7EC289974494E76938DDC
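
These creation rules pick the PGP key and the set of encrypted keys from the file's path, so encrypting is just a matter of running sops against the file. A small sketch (the file path is hypothetical):

```bash
sops -e -i some-app/values.yaml                   # rule chosen by path_regex
grep -c 'ENC\[AES256_GCM' some-app/values.yaml    # only keys matching encrypted_regex are ciphered
sops -d some-app/values.yaml                      # decrypt to stdout for inspection
```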

82
README.md Normal file
View File

@@ -0,0 +1,82 @@
<h1 align="center">
homeprod
</h1>
<div align="center">
Personal home production environment mono-repo
</div>
### Hardware and operating systems
<img align="left" width="100" src="https://vhaudiquet.fr/public/github_assets/homeprod/p330_sff.png"/>
#### Lenovo ThinkStation P330 SFF
Specifications:
```
Intel Xeon E-2134 @ 3.50GHz (4 cores, 8 threads)
64 GiB DDR4 ECC RAM
1 TiB NVMe SSD
Intel X520-DA2 SFP+ 10Gbps network card
NVIDIA Quadro P620 graphics card
```
Running as a single-node Proxmox host
### Software stack(s)
#### Docker swarm machine
| Icon | Software | Description |
|------|------------|------------------------|
| <img width=32 src="https://avatars.githubusercontent.com/u/1854028"> | Debian Linux | Operating system |
| <img width=32 src="https://avatars.githubusercontent.com/u/5429470"> | Docker Swarm | Container orchestrator |
| <img width=32 src="https://avatars.githubusercontent.com/u/82976448"><img width=32 src="https://avatars.githubusercontent.com/u/76623657"> | Infrastructure applications | Authentik and Stalwart Mail, for authentication and internal email |
| <img width=32 src="https://avatars.githubusercontent.com/u/12724356"><img width=32 src="https://avatars.githubusercontent.com/u/40275816"> | Repository applications | Gitea and Harbor, to store code and container images |
| <img width=30 src="https://buildpath.win/_ipx/w_60&f_webp/buildpath-high-resolution-logo-transparent.png"> <img width=32 src="https://avatars.githubusercontent.com/u/13844975"> | Production applications | Personal applications running on the server |
Debian and Docker / Docker Swarm are deployed by Terraform; the stacks are then deployed by hand for now, with something like [SwarmCD](https://github.com/m-adawi/swarm-cd) being considered for the future.
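Deploying a stack by hand is a single command per compose file (a sketch — the file path and stack name below are illustrative, not fixed by this repository):
```bash
# On the swarm manager, from the directory holding a compose file:
docker stack deploy -c compose.yaml homeassistant   # swarm-style deployment
# or, for services relying on compose-only keys such as container_name:
docker compose up -d
```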
#### Kubernetes one-node cluster
| Icon | Software | Description |
|------|------------|------------------------|
| <img width=32 src="https://avatars.githubusercontent.com/u/13804887"> | Talos Linux | Kubernetes operating system |
| <img width=32 src="https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/logo/logo.png"> | Kubernetes | Container orchestrator |
| <img width=32 src="https://avatars.githubusercontent.com/u/21054566"> | Cilium | Container Network solution |
| <img width=32 src="https://avatars.githubusercontent.com/u/52158677"> | Flux | GitOps CD controller |
| <img width=32 src="https://avatars.githubusercontent.com/u/33050221"> | csi-driver-nfs | NFS Container Storage solution |
| <img width=32 src="https://raw.githubusercontent.com/kubernetes-sigs/external-dns/refs/heads/master/docs/img/external-dns.png"> | external-dns | Automatic ingress DNS mapping |
| <img width=32 src="https://avatars.githubusercontent.com/u/82976448"><img width=32 src="https://avatars.githubusercontent.com/u/76623657"> | Infrastructure applications | Authentik and Stalwart Mail, for authentication and internal email |
| <img width=32 src="https://avatars.githubusercontent.com/u/12724356"><img width=32 src="https://avatars.githubusercontent.com/u/40275816"> | Repository applications | Gitea and Harbor, to store code and container images |
| <img width=30 src="https://buildpath.win/_ipx/w_60&f_webp/buildpath-high-resolution-logo-transparent.png"> <img width=32 src="https://avatars.githubusercontent.com/u/13844975"> | Production applications | Personal applications running on the cluster |
Talos Linux, Cilium and Flux are deployed using Terraform; Flux then takes over and deploys the whole `kubernetes` folder of this repository.
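Once Terraform has installed the flux-operator and the flux-instance, the sync can be checked from a workstation with the generated kubeconfig (a sketch; the kubeconfig path is the one written by `local_file.kubeconfig` in `infra/pve/kube.tf`):
```bash
export KUBECONFIG=infra/pve/kubeconfig
flux check                    # controllers healthy?
flux get kustomizations       # the "homeprod" sync should be Ready
flux get helmreleases -A
```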
### Production/personal applications
This setup runs multiple applications: self-hosted applications for home use, as well as my own applications in production.
#### Production
| Icon | Software | Description |
|------|--------------|-------------|
| <img width=30 src="https://buildpath.win/_ipx/w_60&f_webp/buildpath-high-resolution-logo-transparent.png"> | BuildPath | https://buildpath.win, website for League of Legends champion builds |
| <img width=32 src="https://vhaudiquet.fr/assets/favicon.ico_256x256.png"> | vhaudiquet.fr | https://vhaudiquet.fr, personal website |
#### Personal applications
| Icon | Software | Description |
|------|--------------|-------------|
| <img width=32 src="https://avatars.githubusercontent.com/u/13844975"> | Home Assistant | Home automation software |
| <img width=32 src="https://avatars.githubusercontent.com/u/45698031"> | Jellyfin | Personal media system |
| <img width=32 src="https://avatars.githubusercontent.com/u/99562962"> | Paperless | Personal document manager |
| <img width=32 src="https://avatars.githubusercontent.com/u/32436079"> | Photoprism | Personal photo gallery |
| <img width=32 src="https://avatars.githubusercontent.com/u/67865462"> | Hedgedoc | Shared markdown notes |
| <img width=32 src="https://avatars.githubusercontent.com/u/59452120"> | Excalidraw | Hand-drawn-style diagrams |
| <img width=32 src="https://avatars.githubusercontent.com/u/139791695"> | Stirling PDF | PDF multi-tool |
| <img width=32 src="https://avatars.githubusercontent.com/u/86065214"> | Tandoor Recipes | Personal recipes manager |
| <img width=32 src="https://avatars.githubusercontent.com/u/7628018"> | Syncthing | File synchronization |
| <img width=32 src="https://avatars.githubusercontent.com/u/10428818"> | Gramps | Personal genealogical tool |
| <img width=32 src="https://avatars.githubusercontent.com/u/26692192"> | Navidrome | Personal music streaming service |
| <img width=32 src="https://avatars.githubusercontent.com/u/102734415"> | TubeArchivist | YouTube archiver |
| <img width=24 src="https://radicale.org/assets/logo.svg"> | Radicale | Calendar and contacts server |

View File

@@ -0,0 +1,32 @@
services:
homeassistant:
container_name: homeassistant
image: "ghcr.io/home-assistant/home-assistant:stable"
ports:
- "8123"
networks:
- default
- proxy
environment:
- TZ=Europe/Paris
volumes:
- ha_config:/config
- /etc/localtime:/etc/localtime:ro
- /run/dbus:/run/dbus:ro
restart: unless-stopped
labels:
- "traefik.enable=true"
- "traefik.http.routers.homeassistant.rule=Host(`homeassistant.local`)"
volumes:
ha_config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/home-assistant/config'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,16 @@
services:
matter-server:
image: ghcr.io/home-assistant-libs/python-matter-server:stable
container_name: matter-server
restart: unless-stopped
network_mode: host
volumes:
- data:/data/
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/matter-server/data'

View File

@@ -0,0 +1,25 @@
services:
mosquitto:
image: eclipse-mosquitto
container_name: mosquitto
restart: unless-stopped
ports:
- "1883:1883"
- "9001:9001"
volumes:
- data:/mosquitto/data
- config:/mosquitto/config
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/mosquitto/data'
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/mosquitto/config'

View File

@@ -0,0 +1,28 @@
services:
node-red:
image: nodered/node-red:latest
environment:
- TZ=Europe/Paris
ports:
- "1880"
networks:
- default
- proxy
volumes:
- data:/data
labels:
- "traefik.enable=true"
- "traefik.http.routers.node-red.rule=Host(`nodered.local`)"
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/nodered/data'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,33 @@
services:
zigbee2mqtt:
container_name: zigbee2mqtt
restart: unless-stopped
image: koenkk/zigbee2mqtt
networks:
- default
- proxy
volumes:
- data:/app/data
- /run/udev:/run/udev:ro
ports:
- "8080"
environment:
- TZ=Europe/Paris
devices:
- /dev/ttyUSB0:/dev/ttyUSB0
labels:
- "traefik.enable=true"
- "traefik.http.routers.zigbee2mqtt.rule=Host(`zigbee2mqtt.local`)"
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/zigbee2mqtt/data'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,23 @@
services:
roundcube:
image: roundcube/roundcubemail
container_name: roundcube
networks:
- default
- proxy
environment:
- ROUNDCUBEMAIL_DEFAULT_HOST=ssl://mail.vhaudiquet.fr
- ROUNDCUBEMAIL_DEFAULT_PORT=993
- ROUNDCUBEMAIL_SMTP_SERVER=ssl://mail.vhaudiquet.fr
- ROUNDCUBEMAIL_SMTP_PORT=465
ports:
- "8000"
labels:
- "traefik.enable=true"
- "traefik.http.routers.roundcube.rule=Host(`webmail.local`)"
- "traefik.http.routers.roundcube.entrypoints=http"
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,32 @@
services:
stalwart:
image: stalwartlabs/mail-server:latest
container_name: stalwart
networks:
- default
- proxy
volumes:
- stalwart_data:/opt/stalwart-mail
ports:
- "25:25" # SMTP port
- "465:465" # SMTPS port
- "993:993" # IMAPS port
- "587:587" # SMTP Submission STARTTLS
- "8080" # HTTP port
labels:
- "traefik.enable=true"
- "traefik.http.routers.stalwart.rule=Host(`mail.vhaudiquet.fr`)"
- "traefik.http.services.stalwart.loadbalancer.server.port=8080"
volumes:
stalwart_data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/stalwart/data'
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,24 @@
services:
traefik:
image: traefik:v3.2
command:
- "--configFile=/etc/traefik/traefik.yml"
ports:
- "80:80"
- "8080:8080"
networks:
- default
- proxy
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- type: bind
source: /root/traefik.yml
target: /etc/traefik/traefik.yml
labels:
- "traefik.enable=true"
- "traefik.http.routers.traefik.rule=Host(`traefik.local`)"
- "traefik.http.services.traefik.loadbalancer.server.port=8080"
networks:
proxy:
name: proxy

View File

@@ -0,0 +1,16 @@
api:
insecure: true
entryPoints:
http:
address: ":80"
forwardedHeaders:
trustedIPs:
- "127.0.0.1/32"
- "10.1.2.11/32" # nginxproxymanager
providers:
docker:
endpoint: "unix:///var/run/docker.sock"
network: proxy
exposedByDefault: false

View File

@@ -0,0 +1,11 @@
services:
sshportal:
image: moul/sshportal
container_name: sshportal
ports:
- "2222:2222"
volumes:
- "data:/root/"
volumes:
data:

View File

@@ -0,0 +1,96 @@
services:
grampsweb:
container_name: grampsweb
image: ghcr.io/gramps-project/grampsweb:latest
restart: always
networks:
- default
- proxy
ports:
- "5000"
environment:
GRAMPSWEB_TREE: "Gramps Web" # will create a new tree if it does not exist
GRAMPSWEB_CELERY_CONFIG__broker_url: "redis://grampsweb_redis:6379/0"
GRAMPSWEB_CELERY_CONFIG__result_backend: "redis://grampsweb_redis:6379/0"
GRAMPSWEB_RATELIMIT_STORAGE_URI: redis://grampsweb_redis:6379/1
depends_on:
- grampsweb_redis
volumes:
- gramps_users:/app/users
- gramps_index:/app/indexdir
- gramps_thumb_cache:/app/thumbnail_cache
- gramps_cache:/app/cache
- gramps_secret:/app/secret
- gramps_db:/root/.gramps/grampsdb
- gramps_media:/app/media
- gramps_tmp:/tmp
labels:
- "traefik.enable=true"
- "traefik.http.routers.grampsweb.rule=Host(`gramps.local`)"
grampsweb_celery:
container_name: grampsweb_celery
image: ghcr.io/gramps-project/grampsweb:latest
restart: always
environment:
GRAMPSWEB_TREE: "Gramps Web" # will create a new tree if it does not exist
GRAMPSWEB_CELERY_CONFIG__broker_url: "redis://grampsweb_redis:6379/0"
GRAMPSWEB_CELERY_CONFIG__result_backend: "redis://grampsweb_redis:6379/0"
GRAMPSWEB_RATELIMIT_STORAGE_URI: redis://grampsweb_redis:6379/1
volumes:
- gramps_users:/app/users
- gramps_index:/app/indexdir
- gramps_thumb_cache:/app/thumbnail_cache
- gramps_cache:/app/cache
- gramps_secret:/app/secret
- gramps_db:/root/.gramps/grampsdb
- gramps_media:/app/media
- gramps_tmp:/tmp
depends_on:
- grampsweb_redis
command: celery -A gramps_webapi.celery worker --loglevel=INFO --concurrency=2
grampsweb_redis:
image: docker.io/library/redis:7.2.4-alpine
container_name: grampsweb_redis
restart: always
volumes:
gramps_users:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/gramps/users'
gramps_index:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/gramps/index'
gramps_thumb_cache:
gramps_cache:
gramps_secret:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/gramps/secret'
gramps_db:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/gramps/db'
gramps_media:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/gramps/media'
gramps_tmp:
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,39 @@
services:
jackett:
container_name: jackett
image: ghcr.io/hotio/jackett
ports:
- "9117"
networks:
- default
- proxy
environment:
- PUID=1000
- PGID=1000
- UMASK=002
- TZ=Europe/Paris
volumes:
- config:/config
- definitions:/app/Definitions
labels:
- "traefik.enable=true"
- "traefik.http.routers.jackett.rule=Host(`jackett.local`)"
volumes:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/jackett/config'
definitions:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/jackett/Definitions/Custom'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,58 @@
services:
jellyfin:
image: jellyfin/jellyfin
container_name: jellyfin
user: "1000:1000"
networks:
- default
- proxy
ports:
- "8096"
volumes:
- config:/etc/jellyfin
- data:/var/lib/jellyfin
- cache:/cache
- log:/log
- nfs_films:/films
- nfs_series:/series
restart: 'unless-stopped'
# Alternative address used for autodiscovery
environment:
- JELLYFIN_PublishedServerUrl=https://flix.vhaudiquet.fr
- JELLYFIN_CONFIG_DIR=/etc/jellyfin
- JELLYFIN_DATA_DIR=/var/lib/jellyfin
- JELLYFIN_LOG_DIR=/log
- JELLYFIN_CACHE_DIR=/cache
labels:
- "traefik.enable=true"
- "traefik.http.routers.jellyfin.rule=Host(`flix.vhaudiquet.fr`)"
volumes:
nfs_films:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local,ro'
device: ':/mnt/media/films'
nfs_series:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local,ro'
device: ':/mnt/media/series'
cache:
log:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/jellyfin/config'
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/jellyfin/data'
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,32 @@
services:
jellyseerr:
image: fallenbagel/jellyseerr:latest
container_name: jellyseerr
environment:
- LOG_LEVEL=debug
- TZ=Europe/Paris
- PORT=5055 #optional
networks:
- default
- proxy
ports:
- 5055
volumes:
- config:/app/config
restart: unless-stopped
labels:
- "traefik.enable=true"
- "traefik.http.routers.jellyseerr.rule=Host(`jellyseerr.local`)"
volumes:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/jellyseerr/config'
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,44 @@
services:
radarr:
container_name: radarr
image: ghcr.io/hotio/radarr
ports:
- "7878"
networks:
- default
- proxy
environment:
- PUID=1000
- PGID=1000
- UMASK=002
- TZ=Europe/Paris
volumes:
- config:/config
- data_movies:/films
- data_downloads:/mediadl
labels:
- "traefik.enable=true"
- "traefik.http.routers.radarr.rule=Host(`radarr.local`)"
volumes:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/radarr/config'
data_movies:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/films'
data_downloads:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/download'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,48 @@
services:
sonarr:
container_name: sonarr
image: ghcr.io/hotio/sonarr:latest
ports:
- "8989"
networks:
- default
- proxy
environment:
- PUID=1000
- PGID=1000
- UMASK=002
- TZ=Europe/Paris
volumes:
- config:/config
- data_series:/data/tv
- data_downloads:/mediadl
- cache:/cache
- log:/log
labels:
- "traefik.enable=true"
- "traefik.http.routers.sonarr.rule=Host(`sonarr.local`)"
volumes:
cache:
log:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/sonarr/config'
data_series:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/series'
data_downloads:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/download'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,44 @@
services:
transmission:
image: lscr.io/linuxserver/transmission:latest
container_name: transmission
networks:
- default
- proxy
environment:
- PUID=1000
- PGID=1000
- TZ=Europe/Paris
- USER=transmission
- PASS=transmission
- HOST_WHITELIST=transmission.local
volumes:
- config:/config
- downloads:/mediadl
ports:
- '9091'
- 51413:51413
- 51413:51413/udp
restart: unless-stopped
labels:
- "traefik.enable=true"
- "traefik.http.routers.transmission.rule=Host(`transmission.local`)"
volumes:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/transmission/config'
downloads:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/download'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,27 @@
services:
wizarr:
container_name: wizarr
image: ghcr.io/wizarrrr/wizarr:latest
networks:
- default
- proxy
ports:
- 5690:5690
volumes:
- wizarr_data:/data/database
labels:
- "traefik.enable=true"
- "traefik.http.routers.wizarr.rule=Host(`wizarr.local`)"
volumes:
wizarr_data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/wizarr/data'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,41 @@
services:
navidrome:
image: deluan/navidrome:latest
user: 1000:1000 # should be owner of volumes
ports:
- "4533"
restart: unless-stopped
networks:
- default
- proxy
environment:
ND_SCANSCHEDULE: 1h
ND_LOGLEVEL: info
ND_SESSIONTIMEOUT: 24h
ND_BASEURL: "http://navidrome.local"
ND_PORT: 4533
ND_REVERSEPROXYWHITELIST: "172.20.0.0/16,10.1.2.11/32"
volumes:
- data:/data
- "music:/music:ro"
labels:
- "traefik.enable=true"
- "traefik.http.routers.navidrome.rule=Host(`navidrome.local`)"
volumes:
music:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local,ro'
device: ':/mnt/media/music'
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/navidrome/data'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,94 @@
services:
tubearchivist:
container_name: tubearchivist
restart: unless-stopped
image: bbilly1/tubearchivist:v0.4.13
ports:
- "8000"
networks:
- default
- proxy
volumes:
- media:/youtube
- cache:/cache
env_file:
- .env
environment:
- ES_URL=http://archivist-es:9200
- REDIS_HOST=archivist-redis
- HOST_UID=1000
- HOST_GID=1000
- TA_HOST=tubearchivist.local
- TZ=Europe/Paris
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 2m
timeout: 10s
retries: 3
start_period: 30s
depends_on:
- archivist-es
- archivist-redis
labels:
- "traefik.enable=true"
- "traefik.http.routers.tubearchivist.rule=Host(`tubearchivist.local`)"
- "traefik.http.routers.tubearchivist.middlewares=tubearchivist-headers"
- "traefik.http.middlewares.tubearchivist-headers.headers.accesscontrolalloworiginlist=*"
- "traefik.http.middlewares.tubearchivist-headers.headers.accesscontrolallowmethods=GET,OPTIONS,PUT"
- "traefik.http.middlewares.tubearchivist-headers.headers.accesscontrolallowheaders=Authorization,*"
archivist-redis:
image: redis/redis-stack-server
container_name: archivist-redis
restart: unless-stopped
expose:
- "6379"
volumes:
- redis:/data
depends_on:
- archivist-es
archivist-es:
image: bbilly1/tubearchivist-es # only for amd64, or use official es 8.16.0
container_name: archivist-es
restart: unless-stopped
env_file:
- .env
environment:
- "ES_JAVA_OPTS=-Xms1g -Xmx1g"
- "xpack.security.enabled=true"
- "discovery.type=single-node"
- "path.repo=/usr/share/elasticsearch/data/snapshot"
ulimits:
memlock:
soft: -1
hard: -1
volumes:
- es:/usr/share/elasticsearch/data
expose:
- "9200"
volumes:
media:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/media/youtube'
cache:
redis:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tubearchivist/redis'
es:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tubearchivist/es'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,86 @@
services:
pplessbroker:
image: docker.io/library/redis:7
restart: unless-stopped
volumes:
- redisdata:/data
pplessdb:
image: docker.io/library/postgres:16
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
env_file:
- .env
environment:
POSTGRES_DB: paperless
paperless-webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
networks:
- default
- proxy
depends_on:
- pplessdb
- pplessbroker
ports:
- "8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- export:/usr/src/paperless/export
- consume:/usr/src/paperless/consume
labels:
- "traefik.enable=true"
- "traefik.http.routers.paperless-webserver.rule=Host(`paperless.local`)"
env_file: .env
environment:
PAPERLESS_REDIS: redis://pplessbroker:6379
PAPERLESS_DBHOST: pplessdb
PAPERLESS_APPS: allauth.socialaccount.providers.openid_connect
PAPERLESS_ACCOUNT_DEFAULT_HTTP_PROTOCOL: http
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/data'
media:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/media'
pgdata:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/db'
redisdata:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/redis'
export:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/export'
consume:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/paperless/consume'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,48 @@
services:
radicale:
image: tomsquest/docker-radicale
container_name: radicale
ports:
- 5232
init: true
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- ALL
cap_add:
- SETUID
- SETGID
- CHOWN
- KILL
deploy:
resources:
limits:
memory: 256M
pids: 50
healthcheck:
test: curl -f http://127.0.0.1:5232 || exit 1
interval: 30s
retries: 3
restart: unless-stopped
volumes:
- data:/data
labels:
- "traefik.enable=true"
- "traefik.http.routers.radicale.rule=Host(`radicale.local`)"
networks:
- default
- proxy
volumes:
data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/radicale/data'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,42 @@
services:
syncthing-valentin:
image: syncthing/syncthing
container_name: syncthing-valentin
hostname: syncthing-valentin
network_mode: host
environment:
- PUID=1000
- PGID=1000
volumes:
- config:/var/syncthing/config
- valentin_documents:/valentin/Documents
- valentin_photos:/valentin/Photos
ports:
- 8384:8384 # Web UI
- 22000:22000/tcp # TCP file transfers
- 22000:22000/udp # QUIC file transfers
- 21027:21027/udp # Receive local discovery broadcasts
restart: unless-stopped
healthcheck:
test: curl -fkLsS -m 2 127.0.0.1:8384/rest/noauth/health | grep -o --color=never OK || exit 1
interval: 1m
timeout: 10s
retries: 3
volumes:
config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/syncthing-valentin/config'
valentin_documents:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/main_storage/valentin/Documents'
valentin_photos:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/main_storage/valentin/Photos'

View File

@@ -0,0 +1,71 @@
services:
db_recipes:
restart: always
image: postgres:16-alpine
volumes:
- postgresql:/var/lib/postgresql/data
env_file:
- ./.env
web_recipes:
restart: always
image: vabene1111/recipes
env_file:
- ./.env
volumes:
- staticfiles:/opt/recipes/staticfiles
- nginx_config:/opt/recipes/nginx/conf.d
- mediafiles:/opt/recipes/mediafiles
depends_on:
- db_recipes
nginx_recipes:
image: nginx:mainline-alpine
restart: always
networks:
- default
- proxy
ports:
- 80
env_file:
- ./.env
depends_on:
- web_recipes
volumes:
- nginx_config:/etc/nginx/conf.d:ro
- staticfiles:/static:ro
- mediafiles:/media:ro
labels:
- "traefik.enable=true"
- "traefik.http.routers.nginx_recipes.rule=Host(`tandoor.local`)"
volumes:
nginx_config:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tandoor/nginx_config'
staticfiles:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tandoor/staticfiles'
mediafiles:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tandoor/mediafiles'
postgresql:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/tandoor/db'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,27 @@
services:
alexscript:
container_name: alexscript
image: git.vhaudiquet.fr/vhaudiquet/alexscript:latest
networks:
- default
- proxy
environment:
- TZ=Europe/Paris
ports:
- 80
volumes:
- /etc/timezone:/etc/timezone:ro
- reservations:/app/reservations
- selenium_data_02:/app/.chrome-selenium
labels:
- "traefik.enable=true"
- "traefik.http.routers.alexscript.rule=Host(`alexscript.vhaudiquet.fr`)"
volumes:
reservations:
selenium_data_02:
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,20 @@
services:
semeryfr:
container_name: semeryfr
image: git.vhaudiquet.fr/semerys/semery.fr:latest
networks:
- default
- proxy
ports:
- 80
labels:
- "traefik.enable=true"
- "traefik.http.routers.semeryfr.rule=Host(`semery.fr`)"
environment:
- NGINX_HOST=semery.fr
- NGINX_PORT=80
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,29 @@
services:
vhaudiquetfr:
container_name: vhaudiquetfr
image: git.vhaudiquet.fr/vhaudiquet/vhaudiquet.fr:latest
networks:
- default
- proxy
ports:
- 80
labels:
- "traefik.enable=true"
- "traefik.http.routers.vhaudiquetfr.rule=Host(`vhaudiquet.fr`)"
environment:
- NGINX_HOST=vhaudiquet.fr
- NGINX_PORT=80
volumes:
- public:/usr/share/nginx/html/public
networks:
proxy:
external: true
name: proxy
volumes:
public:
driver_opts:
type: 'nfs'
o: 'addr=truenas.local'
device: ':/mnt/main_storage/public'

View File

@@ -0,0 +1,16 @@
services:
excalidraw:
image: excalidraw/excalidraw
networks:
- default
- proxy
ports:
- "80"
labels:
- "traefik.enable=true"
- "traefik.http.routers.excalidraw.rule=Host(`excalidraw.local`)"
networks:
proxy:
name: proxy
external: true

View File

@@ -0,0 +1,55 @@
services:
hedgedoc-database:
image: postgres:11.6-alpine
env_file:
- .env
environment:
- POSTGRES_DB=codimd
volumes:
- "database-data:/var/lib/postgresql/data"
restart: always
hedgedoc:
image: quay.io/hedgedoc/hedgedoc:1.10.0
networks:
- default
- proxy
env_file:
- .env
environment:
- CMD_USECDN=false
- CMD_ALLOW_EMAIL_REGISTER=false
- CMD_ALLOW_ANONYMOUS=false
- CMD_DOMAIN=md.vhaudiquet.fr
- CMD_PROTOCOL_USESSL=true
depends_on:
- hedgedoc-database
ports:
- "3000"
volumes:
- upload-data:/home/hackmd/app/public/uploads
- upload-data:/hedgedoc/public/uploads
restart: always
labels:
- "traefik.enable=true"
- "traefik.http.routers.hedgedoc.rule=Host(`md.vhaudiquet.fr`)"
- "traefik.http.routers.hedgedoc.entrypoints=http"
volumes:
database-data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/codimd/db'
upload-data:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/codimd/uploads'
networks:
proxy:
external: true
name: proxy

View File

@@ -0,0 +1,283 @@
x-server-discovery: &server-discovery
NOTESNOOK_SERVER_PORT: 5264
NOTESNOOK_SERVER_HOST: notesnook-server
IDENTITY_SERVER_PORT: 8264
IDENTITY_SERVER_HOST: identity-server
SSE_SERVER_PORT: 7264
SSE_SERVER_HOST: sse-server
SELF_HOSTED: 1
IDENTITY_SERVER_URL: ${AUTH_SERVER_PUBLIC_URL}
NOTESNOOK_APP_HOST: ${NOTESNOOK_APP_PUBLIC_URL}
x-env-files: &env-files
- .env
services:
validate:
image: vandot/alpine-bash
entrypoint: /bin/bash
env_file: *env-files
command:
- -c
- |
# List of required environment variables
required_vars=(
"INSTANCE_NAME"
"NOTESNOOK_API_SECRET"
"DISABLE_SIGNUPS"
"SMTP_USERNAME"
"SMTP_PASSWORD"
"SMTP_HOST"
"SMTP_PORT"
"AUTH_SERVER_PUBLIC_URL"
"NOTESNOOK_APP_PUBLIC_URL"
"MONOGRAPH_PUBLIC_URL"
"ATTACHMENTS_SERVER_PUBLIC_URL"
)
# Check each required environment variable
for var in "$${required_vars[@]}"; do
if [ -z "$${!var}" ]; then
echo "Error: Required environment variable $$var is not set."
exit 1
fi
done
echo "All required environment variables are set."
# Ensure the validate service runs first
restart: "no"
notesnook-db:
image: mongo:7.0.12
hostname: notesnookdb
volumes:
- dbdata:/data/db
- dbdata:/data/configdb
networks:
- notesnook
command: --replSet rs0 --bind_ip_all
depends_on:
validate:
condition: service_completed_successfully
healthcheck:
test: echo 'db.runCommand("ping").ok' | mongosh mongodb://localhost:27017 --quiet
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
# The Notesnook sync server requires transactions, which only work
# with a MongoDB replica set.
# This job just runs `rs.initiate()` on our MongoDB instance,
# upgrading it to a replica set. This is only required once, but running
# it multiple times is not an issue.
# (A spot-check sketch follows this compose file.)
initiate-rs0:
image: mongo:7.0.12
networks:
- notesnook
depends_on:
- notesnook-db
entrypoint: /bin/sh
command:
- -c
- |
mongosh mongodb://notesnookdb:27017 <<EOF
rs.initiate();
rs.status();
EOF
notesnook-s3:
image: minio/minio:RELEASE.2024-07-29T22-14-52Z
# ports:
# - 9000:9000
networks:
- notesnook
- proxy
volumes:
- s3data:/data/s3
environment:
MINIO_BROWSER: "on"
depends_on:
validate:
condition: service_completed_successfully
env_file: *env-files
command: server /data/s3 --console-address :9090
healthcheck:
test: timeout 5s bash -c ':> /dev/tcp/127.0.0.1/9000' || exit 1
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
labels:
- "traefik.enable=true"
- "traefik.http.routers.notesnook-s3.rule=Host(`notesnook.vhaudiquet.fr`) && PathPrefix(`/s3`)"
- "traefik.http.routers.notesnook-s3.middlewares=notesnook-s3"
- "traefik.http.middlewares.notesnook-s3.stripprefix.prefixes=/s3"
- "traefik.docker.network=proxy"
- "traefik.http.routers.notesnook-s3.entrypoints=http"
# There's no way to specify a default bucket in MinIO, so we have to
# set it up ourselves.
setup-s3:
image: minio/mc:RELEASE.2024-07-26T13-08-44Z
depends_on:
- notesnook-s3
networks:
- notesnook
entrypoint: /bin/bash
env_file: *env-files
command:
- -c
- |
until mc alias set minio http://notesnook-s3:9000 ${MINIO_ROOT_USER:-minioadmin} ${MINIO_ROOT_PASSWORD:-minioadmin}; do
sleep 1;
done;
mc mb minio/attachments -p
identity-server:
image: streetwriters/identity:latest
ports:
- 8264
networks:
- notesnook
- proxy
env_file: *env-files
depends_on:
- notesnook-db
healthcheck:
test: wget --tries=1 -nv -q http://localhost:8264/health -O- || exit 1
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
environment:
<<: *server-discovery
MONGODB_CONNECTION_STRING: mongodb://notesnookdb:27017/identity?replSet=rs0
MONGODB_DATABASE_NAME: identity
labels:
- "traefik.enable=true"
- "traefik.http.routers.identity-server.rule=Host(`notesnook.vhaudiquet.fr`) && PathPrefix(`/identity`)"
- "traefik.http.routers.identity-server.middlewares=identity-server,notesnook-server-cors"
- "traefik.http.middlewares.identity-server.stripprefix.prefixes=/identity"
- "traefik.docker.network=proxy"
- "traefik.http.routers.identity-server.entrypoints=http"
- "traefik.http.services.identity-server.loadbalancer.server.port=8264"
notesnook-server:
image: streetwriters/notesnook-sync:latest
ports:
- 5264
networks:
- notesnook
- proxy
env_file: *env-files
depends_on:
- notesnook-s3
- setup-s3
- identity-server
healthcheck:
test: wget --tries=1 -nv -q http://localhost:5264/health -O- || exit 1
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
environment:
<<: *server-discovery
MONGODB_CONNECTION_STRING: mongodb://notesnookdb:27017/?replSet=rs0
MONGODB_DATABASE_NAME: notesnook
S3_INTERNAL_SERVICE_URL: "http://notesnook-s3:9000"
S3_INTERNAL_BUCKET_NAME: "attachments"
S3_ACCESS_KEY_ID: "${MINIO_ROOT_USER:-minioadmin}"
S3_ACCESS_KEY: "${MINIO_ROOT_PASSWORD:-minioadmin}"
S3_SERVICE_URL: "${ATTACHMENTS_SERVER_PUBLIC_URL}"
S3_REGION: "us-east-1"
S3_BUCKET_NAME: "attachments"
labels:
- "traefik.enable=true"
- "traefik.http.routers.notesnook-server.rule=Host(`notesnook.vhaudiquet.fr`)"
- "traefik.docker.network=proxy"
- "traefik.http.routers.notesnook-server.entrypoints=http"
- "traefik.http.routers.notesnook-server.middlewares=notesnook-server-cors"
- "traefik.http.middlewares.notesnook-server-cors.headers.accesscontrolalloworiginlist=https://app.notesnook.com,http://localhost:3000,https://notesnook.vhaudiquet.fr"
- "traefik.http.middlewares.notesnook-server-cors.headers.accesscontrolallowmethods=GET,OPTIONS,PUT"
- "traefik.http.middlewares.notesnook-server-cors.headers.accesscontrolallowheaders=Authorization,*"
- "traefik.http.middlewares.notesnook-server-cors.headers.accesscontrolallowcredentials=true"
sse-server:
image: streetwriters/sse:latest
ports:
- 7264
env_file: *env-files
depends_on:
- identity-server
- notesnook-server
networks:
- notesnook
- proxy
labels:
- "traefik.enable=true"
- "traefik.http.routers.sse-server.rule=Host(`notesnook.vhaudiquet.fr`) && PathPrefix(`/sse`)"
- "traefik.http.routers.sse-server.middlewares=sse-server,notesnook-server-cors"
- "traefik.http.middlewares.sse-server.stripprefix.prefixes=/sse"
- "traefik.docker.network=proxy"
- "traefik.http.routers.sse-server.entrypoints=http"
healthcheck:
test: wget --tries=1 -nv -q http://localhost:7264/health -O- || exit 1
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
environment:
<<: *server-discovery
monograph-server:
image: streetwriters/monograph:latest
# ports:
# - 6264:3000
env_file: *env-files
depends_on:
- notesnook-server
networks:
- notesnook
- proxy
healthcheck:
test: wget --tries=1 -nv -q http://localhost:3000/api/health -O- || exit 1
interval: 40s
timeout: 30s
retries: 3
start_period: 60s
environment:
<<: *server-discovery
API_HOST: http://notesnook-server:5264
PUBLIC_URL: ${MONOGRAPH_PUBLIC_URL}
labels:
- "traefik.enable=true"
- "traefik.http.routers.monograph-server.rule=Host(`notesnook.vhaudiquet.fr`) && PathPrefix(`/monograph`)"
- "traefik.http.routers.monograph-server.middlewares=monograph-server,notesnook-server-cors"
- "traefik.http.middlewares.monograph-server.stripprefix.prefixes=/monograph"
- "traefik.docker.network=proxy"
- "traefik.http.routers.monograph-server.entrypoints=http"
- "traefik.http.services.monograph-server.loadbalancer.server.port=3000"
autoheal:
image: willfarrell/autoheal:latest
tty: true
restart: always
environment:
- AUTOHEAL_INTERVAL=60
- AUTOHEAL_START_PERIOD=300
- AUTOHEAL_DEFAULT_STOP_TIMEOUT=10
depends_on:
validate:
condition: service_completed_successfully
volumes:
- /var/run/docker.sock:/var/run/docker.sock
networks:
notesnook:
proxy:
name: proxy
external: true
volumes:
dbdata:
s3data:
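
As the comments in the file note, MongoDB is promoted to a replica set by the `initiate-rs0` job and the default MinIO bucket is created by `setup-s3`. A hedged spot-check once the stack is up (service names come from the compose file above; the mongosh call relies on the stock mongo image):

```bash
docker compose up -d
docker compose exec notesnook-db mongosh --quiet --eval 'rs.status().ok'   # expect: 1
docker compose logs setup-s3 | tail -n 5                                   # bucket "attachments" created
```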

View File

@@ -0,0 +1,53 @@
services:
stirling-pdf:
image: stirlingtools/stirling-pdf:latest
ports:
- '8080'
networks:
- default
- proxy
volumes:
- trainingData:/usr/share/tessdata # Required for extra OCR languages
- extraConfigs:/configs
- customFiles:/customFiles/
- logs:/logs/
- pipeline:/pipeline/
environment:
- DOCKER_ENABLE_SECURITY=false
- INSTALL_BOOK_AND_ADVANCED_HTML_OPS=false
- LANGS=en_GB,fr_FR
labels:
- "traefik.enable=true"
- "traefik.http.routers.stirling-pdf.rule=Host(`stirling-pdf.local`)"
volumes:
trainingData:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/stirling-pdf/trainingData'
extraConfigs:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/stirling-pdf/extraConfigs'
customFiles:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/stirling-pdf/customFiles'
pipeline:
driver: local
driver_opts:
type: 'none'
o: 'bind'
device: '/app/stirling-pdf/pipeline'
logs:
networks:
proxy:
external: true
name: proxy

98
infra/pve/docker.tf Normal file
View File

@@ -0,0 +1,98 @@
/*
* Docker machine terraform file
*/
resource "proxmox_virtual_environment_file" "docker-machine-cloud-config" {
content_type = "snippets"
datastore_id = "local"
node_name = "pve"
source_raw {
data = <<-EOF
#cloud-config
package_update: true
packages:
- git
- ca-certificates
- wget
- curl
- gnupg2
- qemu-guest-agent
runcmd:
- systemctl enable --now qemu-guest-agent
- install -m 0755 -d /etc/apt/keyrings
- curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc
- chmod a+r /etc/apt/keyrings/docker.asc
- echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
- apt-get update
- apt-get -y install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
- docker swarm init
EOF
file_name = "docker-machine-cloud-config.yaml"
}
}
resource "proxmox_virtual_environment_vm" "docker-machine" {
name = "docker-machine"
node_name = "pve"
on_boot = true
agent {
enabled = true
}
tags = ["debian", "debian-latest", "docker", "terraform"]
cpu {
type = "kvm64"
cores = 4
sockets = 1
flags = []
}
memory {
dedicated = 16192
}
network_device {
bridge = "vmbr0"
model = "virtio"
vlan_id = 2
}
lifecycle {
ignore_changes = [
network_interface_names,
mac_addresses,
ipv4_addresses,
ipv6_addresses,
id,
disk,
initialization,
vga
]
}
boot_order = ["scsi0"]
scsi_hardware = "virtio-scsi-single"
disk {
interface = "scsi0"
iothread = true
datastore_id = "local-lvm"
size = 8
discard = "ignore"
}
clone {
vm_id = data.proxmox_virtual_environment_vms.debian_vm_template.vms[0].vm_id
}
vm_id = 701
initialization {
datastore_id = "local-lvm"
interface = "ide2"
vendor_data_file_id = proxmox_virtual_environment_file.docker-machine-cloud-config.id
}
}
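
The cloud-config above installs Docker from the upstream Debian repository and runs `docker swarm init` on first boot. A minimal sketch of applying just this machine and checking the swarm state (the SSH hostname is an assumption, not something this file configures):

```bash
cd infra/pve
terraform apply -target=proxmox_virtual_environment_vm.docker-machine
# After cloud-init finishes on the VM:
ssh root@docker-machine 'docker info --format "{{.Swarm.LocalNodeState}}"'   # expect: active
```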

370
infra/pve/kube.tf Normal file
View File

@@ -0,0 +1,370 @@
/*
* Kubernetes cluster terraform file
*/
resource "proxmox_virtual_environment_download_file" "talos-cloudimg" {
content_type = "iso"
datastore_id = "local"
file_name = "talos-v1.9.4-nocloud-amd64.iso"
node_name = "pve"
url = "https://factory.talos.dev/image/ce4c980550dd2ab1b17bbf2b08801c7eb59418eafe8f279833297925d67c7515/v1.9.4/nocloud-amd64.iso"
}
resource "proxmox_virtual_environment_vm" "kube" {
name = "kube-talos"
description = "Kubernetes Talos Linux"
tags = ["kubernetes", "talos", "terraform"]
node_name = "pve"
vm_id = 702
machine = "q35"
keyboard_layout = "fr"
agent {
enabled = true
}
stop_on_destroy = true
cpu {
cores = 4
type = "x86-64-v3"
}
memory {
dedicated = 16192
floating = 16192
}
boot_order = ["scsi0", "ide0"]
scsi_hardware = "virtio-scsi-single"
cdrom {
enabled = true
file_id = proxmox_virtual_environment_download_file.talos-cloudimg.id
interface = "ide0"
}
disk {
interface = "scsi0"
iothread = true
datastore_id = "local-lvm"
size = 16
discard = "ignore"
file_format = "raw"
}
vga {
type = "serial0"
}
initialization {
datastore_id = "local-lvm"
interface = "ide2"
ip_config {
ipv4 {
address = "dhcp"
}
}
user_account {
keys = [trimspace(var.ssh_public_key)]
password = var.machine_root_password
username = "root"
}
}
lifecycle {
ignore_changes = [
ipv4_addresses, ipv6_addresses, network_interface_names
]
}
network_device {
bridge = "vmbr0"
model = "virtio"
mac_address = "BC:24:11:F6:E1:C9"
vlan_id = 2
}
operating_system {
type = "l26"
}
tpm_state {
version = "v2.0"
}
serial_device {}
}
resource "talos_machine_secrets" "kube" {}
data "talos_machine_configuration" "kube" {
cluster_name = "kube"
machine_type = "controlplane"
cluster_endpoint = "https://kube-talos.local:6443"
machine_secrets = talos_machine_secrets.kube.machine_secrets
config_patches = [
yamlencode({
machine = {
install = {
image = "factory.talos.dev/installer/ce4c980550dd2ab1b17bbf2b08801c7eb59418eafe8f279833297925d67c7515:v1.9.4"
}
network = {
nameservers = [
"10.1.2.3"
]
}
}
cluster = {
allowSchedulingOnControlPlanes = true
apiServer = {
certSANs = [
"kube-talos.local"
]
}
network = {
dnsDomain = "kube-talos.local"
cni = {
name: "none"
}
}
proxy = {
disabled = true
}
}
})
]
}
data "talos_client_configuration" "kube" {
cluster_name = "kube"
client_configuration = talos_machine_secrets.kube.client_configuration
nodes = ["kube-talos.local"]
}
resource "talos_machine_configuration_apply" "kube" {
client_configuration = talos_machine_secrets.kube.client_configuration
machine_configuration_input = data.talos_machine_configuration.kube.machine_configuration
node = proxmox_virtual_environment_vm.kube.ipv4_addresses[7][0] # lo + 6 talos-created interfaces before eth0
depends_on = [ proxmox_virtual_environment_vm.kube ]
lifecycle {
replace_triggered_by = [ proxmox_virtual_environment_vm.kube ]
}
}
resource "talos_machine_bootstrap" "kube" {
node = proxmox_virtual_environment_vm.kube.ipv4_addresses[7][0] # lo + 6 talos-created interfaces before eth0
client_configuration = talos_machine_secrets.kube.client_configuration
depends_on = [ talos_machine_configuration_apply.kube ]
lifecycle {
replace_triggered_by = [ proxmox_virtual_environment_vm.kube ]
}
}
resource "talos_cluster_kubeconfig" "kube" {
node = proxmox_virtual_environment_vm.kube.ipv4_addresses[7][0] # lo + 6 talos-created interfaces before eth0
depends_on = [ talos_machine_bootstrap.kube ]
client_configuration = talos_machine_secrets.kube.client_configuration
}
output "kubeconfig" {
sensitive = true
value = talos_cluster_kubeconfig.kube.kubeconfig_raw
}
resource "local_file" "kubeconfig" {
content = "${talos_cluster_kubeconfig.kube.kubeconfig_raw}"
filename = "${path.module}/kubeconfig"
depends_on = [ talos_cluster_kubeconfig.kube ]
}
# TODO: Wait for talos_cluster_kubeconfig...
resource "helm_release" "cilium" {
name = "cilium"
namespace = "kube-system"
repository = "https://helm.cilium.io/"
chart = "cilium"
wait = false
depends_on = [ local_file.kubeconfig ]
set {
name = "ipam.mode"
value = "kubernetes"
}
set {
name = "kubeProxyReplacement"
value = true
}
set {
name = "securityContext.capabilities.ciliumAgent"
value = "{CHOWN,KILL,NET_ADMIN,NET_RAW,IPC_LOCK,SYS_ADMIN,SYS_RESOURCE,DAC_OVERRIDE,FOWNER,SETGID,SETUID}"
}
set {
name = "securityContext.capabilities.cleanCiliumState"
value = "{NET_ADMIN,SYS_ADMIN,SYS_RESOURCE}"
}
set {
name = "cgroup.autoMount.enabled"
value = false
}
set {
name = "cgroup.hostRoot"
value = "/sys/fs/cgroup"
}
set {
name = "k8sServiceHost"
value = "localhost"
}
set {
name = "k8sServicePort"
value = 7445
}
set {
name = "etcd.clusterDomain"
value = "kube-talos.local"
}
set {
name = "hubble.relay.enabled"
value = true
}
# Enable hubble ui
set {
name = "hubble.ui.enabled"
value = true
}
# Gateway API support
set {
name = "gatewayAPI.enabled"
value = true
}
set {
name = "gatewayAPI.enableAlpn"
value = true
}
set {
name = "gatewayAPI.enableAppProtocol"
value = true
}
# Gateway API trusted hops: for the reverse proxy in front
set {
name = "gatewayAPI.xffNumTrustedHops"
value = 1
}
# Single-node cluster, so 1 operator only
set {
name = "operator.replicas"
value = 1
}
# L2 announcements
set {
name = "l2announcements.enabled"
value = true
}
set {
name = "externalIPs.enabled"
value = true
}
# Disable ingress controller (traefik will be used for now)
set {
name = "ingressController.enabled"
value = false
}
set {
name = "ingressController.loadbalancerMode"
value = "shared"
}
# Ingress controller exposed externally: behind the reverse proxy, trust 1 hop
set {
name = "envoy.xffNumTrustedHopsL7PolicyIngress"
value = 1
}
# Set cilium as default ingress controller
set {
name = "ingressController.default"
value = true
}
set {
name = "ingressController.service.externalTrafficPolicy"
value = "Local"
}
}
resource "kubernetes_namespace" "flux-system" {
metadata {
name = "flux-system"
}
lifecycle {
ignore_changes = [ metadata[0].annotations, metadata[0].labels ]
}
depends_on = [ talos_cluster_kubeconfig.kube, local_file.kubeconfig, helm_release.cilium ]
}
resource "kubernetes_secret" "flux-sops" {
metadata {
name = "flux-sops"
namespace = "flux-system"
}
type = "generic"
data = {
"sops.asc"=var.sops_private_key
}
depends_on = [ kubernetes_namespace.flux-system ]
}
resource "helm_release" "flux-operator" {
name = "flux-operator"
namespace = "flux-system"
repository = "oci://ghcr.io/controlplaneio-fluxcd/charts"
chart = "flux-operator"
wait = true
depends_on = [ kubernetes_secret.flux-sops ]
}
resource "helm_release" "flux-instance" {
name = "flux"
namespace = "flux-system"
repository = "oci://ghcr.io/controlplaneio-fluxcd/charts"
chart = "flux-instance"
values = [
file("values/components.yaml")
]
set {
name = "instance.distribution.version"
value = "2.x"
}
set {
name = "instance.distribution.registry"
value = "ghcr.io/fluxcd"
}
set {
name = "instance.sync.name"
value = "homeprod"
}
set {
name = "instance.sync.kind"
value = "GitRepository"
}
set {
name = "instance.sync.url"
value = "https://github.com/vhaudiquet/homeprod"
}
set {
name = "instance.sync.path"
value = "kubernetes/"
}
set {
name = "instance.sync.ref"
value = "refs/heads/main"
}
depends_on = [ helm_release.flux-operator ]
}
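
A sketch of the full bring-up from this module (the kubeconfig path matches the `local_file.kubeconfig` resource above; the Cilium label selector is the standard upstream convention, not something set in this repo):

```bash
cd infra/pve
terraform init && terraform apply
export KUBECONFIG=./kubeconfig                      # written by local_file.kubeconfig
kubectl get nodes -o wide                           # single Talos control-plane node
kubectl -n kube-system get pods -l k8s-app=cilium   # Cilium agent up?
kubectl -n flux-system get pods                     # flux-operator + controllers
```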

52
infra/pve/main.tf Normal file
View File

@@ -0,0 +1,52 @@
# Terraform providers configuration
terraform {
required_providers {
proxmox = {
source = "bpg/proxmox"
version = "0.69.1"
}
talos = {
source = "siderolabs/talos"
version = "0.7.1"
}
kubernetes = {
source = "hashicorp/kubernetes"
version = "2.36.0"
}
helm = {
source = "hashicorp/helm"
version = "2.17.0"
}
}
}
# Proxmox configuration
provider "proxmox" {
endpoint = "https://pve.local:8006/"
api_token = var.api_token
insecure = true
ssh {
agent = true
username = "root"
}
}
# Talos configuration
provider "talos" {}
# Kubernetes configuration
provider "kubernetes" {
config_path = "${path.module}/kubeconfig"
}
# Helm configuration
provider "helm" {
kubernetes {
config_path = "${path.module}/kubeconfig"
}
}
# debian-latest VM template(s), cloned to create the other VMs
data "proxmox_virtual_environment_vms" "debian_vm_template" {
node_name = "pve"
tags = ["template", "debian-latest"]
}

170
infra/pve/template.tf Normal file
View File

@@ -0,0 +1,170 @@
/*
* Terraform Proxmox templates
* VM and container templates, used to derive others
*/
# Debian Latest CLOUD disk image
resource "proxmox_virtual_environment_download_file" "debian-latest-cloudimg" {
content_type = "iso"
datastore_id = "local"
file_name = "debian-12-generic-amd64.qcow2.img"
node_name = "pve"
url = "https://cloud.debian.org/images/cloud/bookworm/latest/debian-12-generic-amd64.qcow2"
}
# Base cloud-config ('vendor') file for VM templates
resource "proxmox_virtual_environment_file" "cloud_config" {
content_type = "snippets"
datastore_id = "local"
node_name = "pve"
source_raw {
data = <<-EOF
#cloud-config
package_update: true
packages:
- git
- ca-certificates
- wget
- curl
- gnupg2
- qemu-guest-agent
runcmd:
- systemctl enable --now qemu-guest-agent
EOF
file_name = "cloud-config.yaml"
}
}
# Debian Latest VM template
resource "proxmox_virtual_environment_vm" "debian-latest-template" {
name = "debian-latest-template"
description = "Debian latest template VM from Terraform"
tags = ["debian", "debian-latest", "template", "terraform"]
node_name = "pve"
vm_id = 9002
template = true
machine = "q35"
keyboard_layout = "fr"
agent {
enabled = true
}
stop_on_destroy = true
cpu {
cores = 2
type = "x86-64-v2-AES"
}
memory {
dedicated = 2048
floating = 2048
}
disk {
datastore_id = "local-lvm"
file_id = proxmox_virtual_environment_download_file.debian-latest-cloudimg.id
interface = "scsi0"
}
vga {
type = "serial0"
}
initialization {
datastore_id = "local-lvm"
interface = "ide2"
ip_config {
ipv4 {
address = "dhcp"
}
}
user_account {
keys = [trimspace(var.ssh_public_key)]
password = var.machine_root_password
username = "root"
}
vendor_data_file_id = proxmox_virtual_environment_file.cloud_config.id
}
lifecycle {
ignore_changes = [
ipv4_addresses, ipv6_addresses, network_interface_names
]
}
network_device {
bridge = "vmbr0"
vlan_id = 2
}
operating_system {
type = "l26"
}
tpm_state {
version = "v2.0"
}
serial_device {}
}
# Debian Latest LXC container image
resource "proxmox_virtual_environment_download_file" "debian-latest-lxc-img" {
content_type = "vztmpl"
datastore_id = "local"
node_name = "pve"
url = "http://download.proxmox.com/images/system/debian-12-standard_12.7-1_amd64.tar.zst"
}
# Debian Latest LXC container template
resource "proxmox_virtual_environment_container" "debian-latest-container-template" {
description = "Debian latest template container from Terraform"
node_name = "pve"
vm_id = 9003
template = true
cpu {
cores = 2
}
memory {
dedicated = 512
}
disk {
datastore_id = "local-lvm"
size = 4 # 4 Gigabytes
}
initialization {
hostname = "debian-latest-container-template"
ip_config {
ipv4 {
address = "dhcp"
}
}
user_account {
keys = [trimspace(var.ssh_public_key)]
password = var.machine_root_password
}
}
network_interface {
name = "veth0"
vlan_id = 2
}
operating_system {
template_file_id = proxmox_virtual_environment_download_file.debian-latest-lxc-img.id
type = "debian"
}
}

View File

@@ -0,0 +1,34 @@
instance:
components:
- source-controller
- kustomize-controller
- helm-controller
- notification-controller
- image-reflector-controller
- image-automation-controller
cluster:
type: kubernetes
multitenant: false
networkPolicy: true
domain: "kube-talos.local"
kustomize:
patches:
- target:
kind: Deployment
name: "(kustomize-controller|helm-controller)"
patch: |
- op: add
path: /spec/template/spec/containers/0/args/-
value: --concurrent=10
- op: add
path: /spec/template/spec/containers/0/args/-
value: --requeue-dependency=10s
- patch: |
- op: add
path: /spec/decryption
value:
provider: sops
secretRef:
name: flux-sops
target:
kind: Kustomization
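
The second patch above injects a sops `decryption` block, pointing at the `flux-sops` secret created by Terraform, into the Flux Kustomizations rendered by the operator. A hedged check once the instance is running (resource names taken from `kube.tf`):

```bash
kubectl -n flux-system get secret flux-sops
# "homeprod" is the sync name set on the flux-instance release; expect
# provider: sops and secretRef: flux-sops in the output:
kubectl -n flux-system get kustomization homeprod -o jsonpath='{.spec.decryption}'
```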

19
infra/pve/variables.tf Normal file
View File

@@ -0,0 +1,19 @@
variable "api_token" {
description = "Token used to connect to the Proxmox API"
type = string
}
variable "machine_root_password" {
description = "Root password for VMs and containers"
type = string
}
variable "ssh_public_key" {
description = "Public SSH key authorized to access VMs and containers"
type = string
}
variable "sops_private_key" {
description = "Private SOPS GPG key, used by Flux/Kubernetes to decrypt secrets"
type = string
}
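
None of these variables have defaults, and `*.tfvars` is in `.gitignore`, so values have to come from an untracked tfvars file or from `TF_VAR_*` environment variables. A sketch with placeholder values (the key fingerprint is the one from `.sops.yaml`; everything else is illustrative):

```bash
cd infra/pve
export TF_VAR_api_token='root@pam!terraform=xxxxxxxx'         # placeholder
export TF_VAR_machine_root_password='changeme'                # placeholder
export TF_VAR_ssh_public_key="$(cat ~/.ssh/id_ed25519.pub)"
export TF_VAR_sops_private_key="$(gpg --export-secret-keys --armor DC6910268E657FF70BA7EC289974494E76938DDC)"
terraform plan
```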

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: gitea
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: gitea-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease
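
The kustomization above generates a Secret from the (sops-encrypted) values.yaml with a content-hash suffix, and this kustomizeconfig teaches kustomize's name-reference tracking to rewrite `spec.valuesFrom.name` in the HelmRelease to that hashed name. A hedged way to see the wiring locally (the directory path is illustrative, since the file paths are not shown in this diff view):

```bash
# From the directory containing this kustomization:
kustomize build . | grep -B1 -A3 'valuesFrom:'
# The referenced Secret name should carry a hash suffix, e.g. gitea-values-<hash>,
# and match the generated Secret's metadata.name.
```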

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: gitea

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: gitea
namespace: gitea
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: gitea
namespace: gitea
chart: gitea
interval: 1m
valuesFrom:
- kind: Secret
name: gitea-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: gitea
namespace: gitea
spec:
interval: 1m
url: https://dl.gitea.io/charts/

View File

@@ -0,0 +1,102 @@
ingress:
enabled: true
hosts:
- host: git.vhaudiquet.fr
paths:
- path: /
pathType: Prefix
# Disable HA, we are on a 1-node cluster
redis-cluster:
enabled: false
redis:
enabled: true
postgresql:
enabled: true
global:
postgresql:
auth:
postgressPassword: ENC[AES256_GCM,data:S0jkpB+S8jV5+w==,iv:8lZJkw5DN2b0OhI1FHGYv90ZH1Pn0XXU2RSyv4aVx/Q=,tag:9r908balQkich6RFPlpHCQ==,type:str]
password: ENC[AES256_GCM,data:+KBkk+NOsM7Amg==,iv:VmBbDVk9G4rRgieFeJrFoWrXIfrY50Aq1/XBKsfamL4=,tag:oK3AFrYG7rGXIjttGgOv3Q==,type:str]
database: gitea
username: ENC[AES256_GCM,data:jiZce5o=,iv:mKzFJAXbSoaW4REoND/uJ6SW8sNhGOhjxH7X8R9nw0M=,tag:QSRIzYeRWYMju3hCEt+wpg==,type:str]
volumePermissions:
enabled: true
postgresql-ha:
enabled: false
persistence:
enabled: true
subPath: data
extraVolumes:
- name: git
nfs:
server: truenas.local
path: /mnt/main_storage/git
extraContainerVolumeMounts:
- name: git
mountPath: /git
clusterDomain: kube-talos.local
actions:
enabled: true
provisioning:
enabled: true
gitea:
oauth:
- name: Authentik
provider: openidConnect
key: ENC[AES256_GCM,data:s6yBMmp8FpIaC7m4uGen81nfPbCLnbJ06a0hEeuCLnPouXS2qNibkw==,iv:NpxJ3/fKcaEkctYqN9FV7WDWgdK0f2h4YUkW2e9ifAA=,tag:4WItsUK3J8hRk0c9qlWgMQ==,type:str]
secret: ENC[AES256_GCM,data:lO1R+gT+S2FrGhC6Ld/IRnRHDFjoB0/T+/Zle41oMchGtmmbdyKAUmTG9W6FzZzBsTEdV5GjgfwGU/G+PGnB1HPQkDD7INfy8qryfCBXhxqE9tUcpb8umxcXn77Yba1hAdETXY4FNpePv269Opixu/R4o4vTwG4isAGCkyTAwLw=,iv:LmZR/0+nXt5Dj59aqq4u0Qa/bqeMpCrs2TPi+mBG7eE=,tag:5+E5GUbOkF+GXVBLpLtodA==,type:str]
autoDiscoverUrl: https://authentik.vhaudiquet.fr/application/o/gitea/.well-known/openid-configuration
config:
APP_NAME: Gitea
server:
ROOT_URL: https://git.vhaudiquet.fr
lfs:
STORAGE_TYPE: local
PATH: /git/lfs
repository:
ROOT: /git/repo
database:
DB_TYPE: postgres
service:
ALLOW_ONLY_EXTERNAL_REGISTRATION: true
oauth2_client:
ENABLE_AUTO_REGISTRATION: true
ACCOUNT_LINKING: login
openid:
ENABLE_OPENID_SIGNIN: false
ENABLE_OPENID_SIGNUP: false
indexer:
ISSUE_INDEXER_TYPE: bleve
REPO_INDEXER_ENABLED: true
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:cVb4woEMFuW+KO9OtBFMQw1bcDeiJ93FR3mVY3l1nrGENXRPmlJQKsSUhMUlrn7nNnAQWbrJJ65u2MPvn2hZXRZsU0jy9vojSnF7XDecnrSzP69/lw8gRozBUlsLuiH7hivtKVuLDFMguuyD9wkCiQxp91ajJPZhaNiUkIgxz3o=,iv:q0L/o+6umhIm0Vf7tJ580Pi3i88VfhV7437qwxkyBvU=,tag:JlFXgYdVjnRHB190emIb1Q==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ/+LEKY6WwSXYVrv0Fs79agmy3GIm1zmJGny6Dw6IpB2ZzE
+r1h41IMltscDfXmiu2hAn/J7NXupc5Ak3OFR0LxYDsWIyWe8sF9JiRtjlcJHI0s
7II4Gg8WrGaOOoEGFpDCnwBkuz/rOrFWcXeE7NPu2UuwWwcKDXqgA0R47U1RJlHH
KWzyE9blF3bOvJDALHnxSeyarVmL+IIDpx5+M5oOSHxigR8ltyZnIDvKuZ/ZxDKr
e8Te2W5MRjrhsFW2dWyHXaMnJeFM2KqiPQUw4BkEx1LlNf2O85cJhPHKX06PhMtl
L7fxLhwWsA0G/VRu1LDpCDKvQrwnhF6MgAI1KrU3Lweeo/LCtv3k20anpFMSFbaF
bplci98Ee4pk+Kk3iX+WuCcZOXeCucr/oRb1bbNWLK0eT/vrI2DBGHsgzbkr+S91
OZiUjZgKc7KgU8j+opW+Tfao8uvVCamq/VqMVGmxW07j5CcuwxAWB6srxxA/B8PV
qzuB046F/hMi3ibAYi+R+zCCM4MR33G0blgKEZk4Z/UQenMpqaQMZWr0pUA+l+9a
JT1JVRE4SVMXhNYKxZ3Jcfv56Ua4eMyxNGfbq7iyv/YJPCNvQKcsd4rJRaHgojUm
kVwAZO5lPknuDwrIQQ/XaRnd+K5EnyRncFzNow3Dbsr04DZr7e2sGQy5IjM2ob/U
aAEJAhD80JnUs0Q0GK4wR0uwEHq8ACsMmJG4qPoljiQ2qL/T/5SLjui2lHThqm1j
tM9UbHsGUF/NN0HPZtDVASsaXO+jYCjM108e7yj58IEMvPLymIAXM+eXWoanpBX3
YLbnTetfr+ut
=u3XM
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4
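
Values files like this one are edited through sops rather than directly: the ciphertext above is bound to the PGP fingerprint listed in the metadata, so the matching private key has to be present in the local GPG keyring. A minimal sketch, run from the directory containing this file (assumed to be named values.yaml):

# Open the encrypted values in $EDITOR; sops re-encrypts on save
sops values.yaml
# Or decrypt to stdout for inspection without touching the file
sops -d values.yaml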

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: harbor
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: harbor-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: harbor

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: harbor
namespace: harbor
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: harbor
namespace: harbor
chart: harbor
interval: 1m
valuesFrom:
- kind: Secret
name: harbor-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: harbor
namespace: harbor
spec:
interval: 1m
url: https://helm.goharbor.io

View File

@@ -0,0 +1,42 @@
externalURL: http://harbor.local
expose:
type: ingress
ingress:
hosts:
core: harbor.local
core:
extraEnvVars:
- name: ENC[AES256_GCM,data:eMpF9wMbBxkYceKeTXU4CPc/zDME,iv:zOZrXlAv45gNv2FffG2kqXq2670HwXCKSZjQaegEz+Y=,tag:dTNolgpFxKuXCg5/5/s0gQ==,type:str]
value: ENC[AES256_GCM,data:c5wQjHgl9y4rAYf1gHnK26cGRtg4wKVf9oHliewruhZkGflu8ds9OL5EPe8U7FuPjt9IGTHtovbjlWv3L+8myfYX1IBEe9tJ/2VdAsaldNFQKvBTm8V9TXPQeyoiZz9wsBT7Dq15vFBQBs2e3+u2ZVd6Kv0F62Ioq6Utc/Ng2WpoKVo0kFrBbwqg4wdDojnT+oXxyEBLnPUEPsES48w6L8zjcy+SF52FPAAXIM5tj9KVD6BlF/8b3B8TUTSZHI/rw5WkOVLL33iCrQnLVa8svR1DncvatEoe4yhfHS38ioft+WuO+pK7DpYYif/6RF7snKVs92mS6hrHreBxiQiiwaSvvMqdGN2G/l/iLaxypxPO4aykhHUPwayJyrX+ay7BOCbyGoXB2fUFxgWv3Shed++iIJc/tyd2n0p0wve0YZUHNrOoFjj8zuYIsGxr4igI0Tao/Gxf27ljGoEwc/A94fVY4AmISj/PYpHGaeTmNbcQJ7QetNXDtkJvBBQ0t7TyDmo+5kUrWU08PBrPxh8H+j0loZFJnbq/hjDHygRXFs+2cpYeV6GjvncWaMlp2Rdu7oq+TBvL9KFlzgp/8pNhOXTFJeQPqyHIzi0Q/JrIZoR8ixYGuvBBBNARWnMyfLbrM5BzJWns8Ybif8WzFIHZy/9ISB13lIiJEDveLbHA4yJGplFFYWBfUzGwEN4K9swE4aAjDC8Vmk7ap/uqFufj4oZT62e/na4hKW+Rtj3bbLzXZPgGzczojTOMet5UGuY=,iv:X8JmR3oy1G94f/K5vYeARyGB0usmMlZl3VrAiDQurH8=,tag:tSlixKKBI2dB4FSx03erZg==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:LIYTSXxmp7xpnrucygefTyHhI0Wi6EbGD0sA1NNLR9x/h+yG/5nlmdr02+dWFiEaeyvpA+6aSeoUWCHOMGMXV0EM5nmcZk14lQBQUWOVeYPbo5KSYzEdCy4ywVIa8HLG5yGJIhGuMPN7EDidkAiHcWNfaVRwRbG0PuczohYlwLg=,iv:fLqIPUlk8XkluxHA/WZd44JiM4FIZEvla33lCcvod2k=,tag:Xtg+QcTYy33WLJDWTECPeA==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ/+IoSA9GM+l1zIjF0mhXxmzMQC5yjcJPXkxBQ1bZTJEvkZ
pL7fl/8Jvc/eQ96DKXpC95vE6m0jRSXhv0kz4HgoRV+tZUBl6CYMPcQXmY4/Zh+a
olL5xkk0U2fjEqX/cxNDomTZP85TQpZy2k4vxx1A4Nun7lhjLPsYM+49Ql9WP69e
f/UZDxyfuZf0ScK0wnCIXJMdB5mgbdMacw7VXrGtbv6vz8tQUk6jqM7S+7VyLeWv
G9jDXn2Cs8FFHJcmlifvTqxdRv8AjSPpYyQ7CVpuSMm3xQzsDGH2pKlkFORNcEXH
76+z7IaE09xiJA07lOiXVnGrAlkK5Y/eSt7227uQJlewQMYXHIm7LffakQzlrEq0
EEmevA1AFtR6Dn1Q9yb7G0QQNFA+Vire7zF/RNkUobqXY2nC9CLgDdyhph+XLfSx
aZILDqhrHKSUVe+YrbZvkQK6MNKKZe7icQtp1NjESS4InWQajXAYzNhy5bPz/9vh
1JbmF0zF9rviOoK8mKXPw5xR5guvK2jVu+zNPuQClrONW6EvKKXhudcnw2JNtbJl
TN1hRXVu6gFJhtpSwt07RCEYNCp15E7foPDKijiG4OZCXZQHSdhLQ3bGo64sbCOq
BIB7OYNNSoMBZZxLzdjX/amBQNzpQFbGZx9560ZyLWcTbJWQYSACh0ukjvA76YLU
ZgEJAhCinfjS8XV8hdGbk08slYRTcoZT0VFuV4J+1CnilNACwxAF/fjvHR4NtIbs
gEhh5W6LjVSol+PxQHyjEPaU6KHV0pryHn5U7AzD0NMbn7o2A5O/DOk12EXQeekk
EWSu2T+KJw==
=UYvy
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: authentik
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: authentik-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: authentik

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: authentik
namespace: authentik
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: authentik
namespace: authentik
chart: authentik
interval: 1m
valuesFrom:
- kind: Secret
name: authentik-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: authentik
namespace: authentik
spec:
interval: 1m
url: https://charts.goauthentik.io/

View File

@@ -0,0 +1,56 @@
authentik:
secret_key: ENC[AES256_GCM,data:MC1gcuH/YnIiwYlXwBgQQWOu4GV4x06ICv3dgcdZPS/4TSwWkI8zQGjjgMFZI6Rr0HLbctbz424MZY9fZem86rZNud57LTRO2yAko9YtAFo=,iv:2jpZ/Ox86yx/eb/C7UNTD04RusSioyNtG/aDR3cnrso=,tag:ORWDxvMyLkGeECVLYHzJHg==,type:str]
postgresql:
password: ENC[AES256_GCM,data:j9eA0WelAFGC6w==,iv:akVI+MgfBXhnjDR3CUCXH8TW2jfM4ZQPo6pyfPA+Wgs=,tag:j+eL1+fFbcr1n9PKRVorYw==,type:str]
email:
from: webbot@vhaudiquet.fr
host: mail.vhaudiquet.fr
port: 587
username: ENC[AES256_GCM,data:k4CtAo9RFLJvJkPbZHRUaczNi/0=,iv:FjR3ChvhSikOZux6Fat+WyIbkK/Jl0hoF7If6DMDRUE=,tag:FqxqDrzriAk3F6QrQCuJ8w==,type:str]
use_tls: true
password: ENC[AES256_GCM,data:dnHdnNTzX8DkcVSIbn0=,iv:Q9YLebID0UsGTWm9wk2ocCmUYVVgmjM1DlfV5I52s/U=,tag:pEDtlyk2jRhnLm3DR+hdtQ==,type:str]
postgresql:
enabled: true
volumePermissions:
enabled: true
auth:
password: ENC[AES256_GCM,data:kpIM6nbYyldabg==,iv:ujP/9CcOtp/xRGkpDojxVHBqAJlQFm6D746bbYGO0XU=,tag:idfGR+OEz5VQN3VEEDlNJg==,type:str]
redis:
enabled: true
server:
ingress:
enabled: true
hosts:
- authentik.vhaudiquet.fr
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:UugtYTl28eAwqf3E7DOZI6lTsFSlagFKtqcBfoyF5vLqaS1bdoi+52gwf/DjP8Qeavr5Kqbs5y3y3+b01aRfIijlqLF5KMKqu0n8jVcOCacjS4MK/vwewV0xNsLDS2Ox3MGBlhlcYwWJXhTAMf7i4DbUJwU1c9ZZbSKAQvYMXR0=,iv:nw48FNSFvswioTrnLy5UfDBM0zM9NJ5WxkQjZwR/PZc=,tag:oEBwGchARm5DVagq1eQG8A==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ/8Dmj2+XNCmr/75OeYBpejcEOq96IlycJ7r4Lcc6dMfV7K
dr56+1XEpM7JFxP11aiK//1Uo7tF7wNIHxuUMPWnaUU54/yd/M+1trQJEXf/eVie
wJQOo2O2xsb1OdfOHx+CIuQEkiCqE2tgz1CEQ6bpQMyJQE9vY3C54SI/W9rsx5E/
zFtec1a9R+N0ZpPvOyY4a+3pWddXJ/qS/SH/MlOcrlg6mldi6rdIfJflpXCccQ/j
msjVRrVcKCfilYY5vcKzW8z6lXyuS8JTszP2JuKZi5nJ/CR6T1tLW3O/ED3NIpgL
uPOH376QtodvEIU41o2yqElxoQ8402f4yGXEZFV15jJS7kzgAOI5YKRINufr7EJn
jbVI9laj48HsLUOG9G8q782VKMflg1kvlKHric3jl10BENSpUcq19fMZj//VjOjI
SbmIt49xmo1HDG8tgCTcrVhuCVWaROCs9sLbnjAhH2x1IbeSiuCkov0n09vjn8U+
OfQAmN2geJYL/hNMaSRaLE1eDd5TGE/LKubEaeoqFQVFwTrgT7yH3qEioTog+4JO
7Dw07pEQcG8LIRm3flJ3YRDfZn12Neb0DHqHxwTPqqJfT5vOJjDp2VK8mP4NTIgT
TlUbtWGJbvNaxs17iAttC7/i8H5MT/y9ihkJmj4Yp2JN9iZHdFVODFmVi94pm6DU
aAEJAhADkvkoe1GJeNgbIjjlpBWlVCFwNDKx5GjRt8v3mA02f+OwVPxqTtyCm9r3
FLjY/xIH8q+tV7LY+Ag2yEdW8XEC8whpn+F5NNzZxs5bilejVqfZ3DbjoeUQd12L
mZyd/SpFtSmp
=mSKU
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4

View File

@@ -0,0 +1,7 @@
apiVersion: "cilium.io/v2alpha1"
kind: CiliumLoadBalancerIPPool
metadata:
name: "local-pool"
spec:
blocks:
- cidr: "10.1.2.187/32"
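
With this pool in place, Cilium assigns 10.1.2.187 to Services of type LoadBalancer. A quick check that the pool was accepted and the address was handed out, assuming kubectl access to the cluster:

# Show the pool and its available/used IP counters
kubectl get ciliumloadbalancerippools.cilium.io
# Confirm that LoadBalancer Services picked up the address
kubectl get svc -A | grep LoadBalancer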

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: csi-driver-nfs
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: csi-driver-nfs-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,6 @@
apiVersion: v1
kind: Namespace
metadata:
name: csi-driver-nfs
labels:
pod-security.kubernetes.io/enforce: privileged

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: csi-driver-nfs
namespace: csi-driver-nfs
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: csi-driver-nfs
namespace: csi-driver-nfs
chart: csi-driver-nfs
interval: 1m
valuesFrom:
- kind: Secret
name: csi-driver-nfs-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: csi-driver-nfs
namespace: csi-driver-nfs
spec:
interval: 1m
url: https://raw.githubusercontent.com/kubernetes-csi/csi-driver-nfs/master/charts

View File

@@ -0,0 +1,49 @@
storageClass:
create: true
name: nfs-csi
annotations:
storageclass.kubernetes.io/is-default-class: "true"
parameters:
server: truenas.local
share: /mnt/fast_app_data/kube/
subDir: ${pvc.metadata.namespace}/${pvc.metadata.name}
reclaimPolicy: Delete
mountOptions:
- noatime
- nfsvers=4.1
driver:
mountPermissions: 0
feature:
enableFSGroupPolicy: true
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:31Z"
mac: ENC[AES256_GCM,data:ceN4v+5tV5iCU3jld/INz689kJpF/v7ChIVObD+4FL5KiaRb0DToygiAzgo5BvbmCL9cudrZ1qtXLSe47PMxBrS2DOzuFfp3nlBfhIc1vyl9IwMJJrdM3VWmTKMQUcjbMpb0bnD0P6S230+DU+lB9Sx+2prCzRpGl1tTEkXjrYo=,iv:dEMp7H/5Ry/uQR5OuweSwUF6h4Cbjm3Dq+ZD2rCxvGI=,tag:NF1wMICn+c2SXoI15ugsdg==,type:str]
pgp:
- created_at: "2025-03-22T13:26:31Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ/+PxuU+B8+VPXqQ8jHkIxCN3cplCGMbyv+lgaiBGUSb3Dl
pfwircBKOYUwobYeCBKL8aGVkvE0AtJ3oAE0sGcLCk5vTggAzU3UctiS79cYCJlu
MoCzfLUonYgG9UBMWhNURE6BPXw8kj5YAmoyFx67pVlgc+6DfloH8crcm7puIMAe
qNcKKDw4H6VY3mBJqKElbRwHdPDMQuOXyViZYGTNbtQTpO9P9wlGNjOrL5weYT/q
t/WDIUjBYO133KDrbBGUq/jPfk0NnGICBYqF7qoEG+1G+Brw8VnRgNNH8UvvqA4/
rGKtvkXFYf563qVPNVeoYvnQG6YSTr4VYfiW1Ru8SvSpsZVJu2Xqhg0ga9JVuLn8
6qFp0Dd2UmskEoHz7HOk9jV712/EzATvEGY8o/LFIR/EVNlLaOQug10GQxQ9MFX4
qfVnnFkNlNW288n6f5fEpCrn1vOllftMbQDeev20qJUXTwVxsammdGUMDoWW7t2U
bf42OIJicrJ+msOSohN1zFafwb09njnpiAP3jJ9kwBACByRaSp9I6p0w1CU2xN0x
J31hRI5ByDQ4ZcTLDNPQmE9bpTy3sfVEiv0EalPAqevxR70CkQEFJZoxIuKxBAaC
O49TIuEfudcL6431m6W29EEW7V944ojKMvEWSx9HgJIfJ8wqU8S4wBcDFy1SoJTU
ZgEJAhB0f09p6f5t6umTqrzGKiHGPrAwlvErbdUTwH3gdk6j3YJCyYR1xzZCFdzW
lMa4kXu3v3wndGU1FteUE3HKkoRPttBg4sf1ekJvICv4HXfADCRar5Hf0CtbHVzB
sH6aWEmtVQ==
=1kP6
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4
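
Once Flux reconciles this release, the chart should register nfs-csi as the cluster's default StorageClass (via the is-default-class annotation above). A quick check, assuming kubectl access:

# The default class is flagged with "(default)" in the output
kubectl get storageclass nfs-csi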

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: external-dns
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: external-dns-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: external-dns

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: external-dns
namespace: external-dns
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: external-dns
namespace: external-dns
chart: external-dns
interval: 1m
valuesFrom:
- kind: Secret
name: external-dns-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: external-dns
namespace: external-dns
spec:
interval: 1m
url: https://kubernetes-sigs.github.io/external-dns/

View File

@@ -0,0 +1,44 @@
provider:
name: pihole
registry: noop
policy: upsert-only
sources:
- ingress
domainFilters:
- .local
extraArgs:
- ENC[AES256_GCM,data:OjceEFEsSdsu8L0cXiJpzXK99e/3qWOTGonYGJ0nCYOWkvQ=,iv:p4NBmSQEcbZ9KlPX6SRE9BszmbamJoDvfHRop3muG+U=,tag:ygCzPPLfpwRX6dYQOQ+z8w==,type:str]
- ENC[AES256_GCM,data:HwTgdSrgGbW6Rfq8IdiCPmQNcmeoeb9/HS4=,iv:96u90ODzbS9xcxR9ZM75GYTcklE93mW9zXtStj6vZU4=,tag:Ye+KzC5M5eiy6+qzvKtP9g==,type:str]
- ENC[AES256_GCM,data:8+M1yWuV6w2yom3Jiz9WZ2h5g7wO/O3z,iv:bFoZpe+rdZpFBs0ZtzPCQw+CW2Zhvc77dNGfk47SmnU=,tag:L3XWx/eMlkvAo2wK0MyYcA==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:MpvLwjJaIiWXACFarrL/rdAXGqUjEYtrN5X6RnlBL5tmrOJQDnlGo54A4GDTaPjyc1/Wy6gz0sL07Neb8UpzRCqp58l6DQJOEvB7YiYVXpakwQXcHqvVUjBAGyiFdMe31QT577zsA9GB/wOmUDfdWlyTRnKVJUj5PGsZlQIPGgk=,iv:t42wJNK6YAfTTvWbdEGfy75+qY2IneId6/qiOCkYZx8=,tag:xl/R6CukyGJMkKAkWLQp3Q==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiARAAiylUaDcKIUVjfap79CyDI5TZpMi6xy9wDuvBb5xQj2pH
m81D0FKnvo+jyZBKhQe4j0tlH3mdbWWDJHmWkqygMSh9qRijYgyaUGCsSBFJ1A1f
3kbO5nDcB2hkGkp7WeghqDNLWoEQCZRx0ofsmId54YiQ9hAWG5yugoftJRSic0r1
my+qlENrV5ylt6ejLW62DrdcuYdahdtwzdUYWsKSLNO+7I67+8sx3aOTrzTMlFxv
eGnJDX2lcStDrk10KcvyHXOSTemo5Qyd4WCnEd6ushzSdBjxge3vJMowVMNOHyqf
gqMCvXflsgrQCd4YiNTkzRhsUHxGi0SaTUEXWpZxRva5dw53fnQGm0/2oTBmbXyo
3iorrU770jt3C09+/Wodae4DqZ6mnE/0UnkfuBn8gdaSN9HEVRtqx4gHqghZaQtB
V2/7ApyVqhmPPKHaqDN9+XzM1t41rmkdwqOJqC8jhtQo6UynhWGR4TPlzhkg9U1n
dhLB4InBy9llX+XyHm94S9T//MtFyX+x5UxU9SwobL2NnONTi55Jct2B9eB7Evpo
PHCd8gNpZsUnXPFdh0VWR/mrnivm+ZXGLeWyDAbUgio2pQS+EgcwdbN3jKIPJsny
iZiak87ZUrPXcqRCESlyfRUwMDx8Y6MeEnqGWMrHM2OqQzbGjM/DyyOMCTelovjU
aAEJAhAFOZyeSq7uAxcdCyYc0iPIUNSQyWc/F6T9C9XOxusvCMcovFaJd1n7dYfD
2iI+We5xiT3Ns9S8QZl92SarYnQ3+YFZO+9fao2bVTaW59OxClKPHKH3nctGvSli
CQQl/7bFyATK
=ZZjJ
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4
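
With registry: noop and policy: upsert-only, external-dns pushes records for .local Ingress hosts to Pi-hole without TXT ownership records and never deletes existing entries. A quick way to see what it is writing, assuming the Deployment takes its name from the HelmRelease (an assumption, not stated here):

# Follow the controller logs to see which records are created in Pi-hole
kubectl -n external-dns logs deploy/external-dns -f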

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: traefik
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: traefik-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: traefik

View File

@@ -0,0 +1,18 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: traefik
namespace: traefik
spec:
interval: 1m
chart:
spec:
sourceRef:
kind: HelmRepository
name: traefik
namespace: traefik
chart: traefik
interval: 1m
valuesFrom:
- kind: Secret
name: traefik-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: traefik
namespace: traefik
spec:
interval: 1m
url: https://traefik.github.io/charts

View File

@@ -0,0 +1,42 @@
ingressClass:
enabled: true
isDefaultClass: true
ports:
web:
forwardedHeaders:
trustedIPs:
- 127.0.0.1/32
# nginx-proxy
- 10.1.2.11/32
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:PMUHyPCnIhmUo5N1mdoMhDLXaFN6Cl0IGuq8EG3MGtY5X1g1QboL5nI5o25evFbuXdZn9KB2AqgzPZBxykhVpz8W+mj987g4VeDJ7sU/OnJibHSo+ibqoo0NvQaAMukWevqI7fAQZoyI3PZi07mMGYw23h2cmaJmsuAuDnQ0CvA=,iv:RRV/BF7OXFmBJX5lXZjrG4+4jjbjzMrR8BByMo5hfwA=,tag:+lVLSfdjHeJjA3dKMiRIGA==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ/8DGnKyC/pNGEAuuxcZjoLQhK8TJ3NgNQ3HBVLGpbVBb3S
P/n94oPwwEbWXpdq1/MapFgaiAP3kXyv308c0CeIICQvg9xFeXK7/o/X3ucJu/YV
TiMsBUCAIWKrN4lmNr3wgnMDQiRs9myzgmzJv3KOpbQr5cYnrT51spWCD2Nnt6Xm
HfLyZrxGscW0lrRi6jeg/7lts3HYEs75i8xUS95pj5/a+7i83sfpaAFdkGcxV6Vq
285Ys7S86Hrp2T0QkADHMJMXmbeTV18Psfy2v9SXgqeRMq1XHQDn+nPPkYY0kmhs
7xVEwGHYLkKuyNmTm+ygsQAVGd/kCeqO+hsdKRtmJ5f4vh0w1ePftScqbfEwNuDl
ygEVUIoVhDYdUKnjwqjgiOxsx3Y6+RS4g3vg6gNWk1HunM24bzkFRP4w1lVYB07n
hDcQeP0bqo7hopJjvM0VtXbSJq81duBup9DyyPaXOf30p0c+l9it4XdoeR7JaZ/y
nJ22POfQYCoJyKpgdB/eReLd/2MqLhdnsCUTd+CNTS1+nCz1M4JziagXU9CspnqP
sCYylw6aC9XfzScZldpysdqes1/1ZC9F2QeL6ZO66IRV3xBk/5eSsyZ275DRZYAj
P4jf1UhA4U0LQoVPAjh9cA8SLm29MgfEwoFSLGx6wsJ//ibxMIlxku9gkiRRTkPU
aAEJAhCQKhc7EsDKh7GgrlPh0763p+CuZR7yMp2W1kY9nU/w/802SgYEyLdPW1aY
gG3zMpt1roTOQI7D0jM7NjcYOLeOHWR0ac00wqv3S7I9+4tXOxuHyTX6Og19Z3GV
OUgA2wzhUFtj
=2DEs
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4

View File

@@ -0,0 +1,13 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: glance
resources:
- namespace.yaml
- repository.yaml
- release.yaml
secretGenerator:
- name: glance-values
files:
- values.yaml=values.yaml
configurations:
- kustomizeconfig.yaml

View File

@@ -0,0 +1,6 @@
nameReference:
- kind: Secret
version: v1
fieldSpecs:
- path: spec/valuesFrom/name
kind: HelmRelease

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: glance

View File

@@ -0,0 +1,19 @@
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
name: glance
namespace: glance
spec:
interval: 1m
chart:
spec:
reconcileStrategy: Revision
sourceRef:
kind: HelmRepository
name: glance
namespace: glance
chart: glance
interval: 1m
valuesFrom:
- kind: Secret
name: glance-values

View File

@@ -0,0 +1,8 @@
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
name: glance
namespace: glance
spec:
interval: 1m
url: https://rubxkube.github.io/charts/

View File

@@ -0,0 +1,48 @@
common:
ingress:
enabled: true
hostName: glance.local
tls:
enabled: false
persistence:
enabled: true
configMap:
enabled: true
data:
- name: config
mountPath: /mnt
data:
- content:
glance.yml: ENC[AES256_GCM,data:xwuScPkmEAdR/ng76IYN0O2YyseSk8HF3JNROruim2xj388yUvKNSfxuzIN1Frh4GjqxluhLhAq5//PQXi4sbTdkzOV2uutBWVNxdSlbyyG67Ruv/Oxa+PB4aQLKPGmGOARXrAd8eh/LKemzb0Myj3OVpGkw2/X5twse7w4Mvll0d/iO5oKVQubqXsMERJv4Xtq+3SYjEn8UL4WBLDVPdKbDlMlva1EVqtvqm3AevegZdO4a46gXSO4/rJZwSdPZCYgcnLNAnvvI6Mx87EuU2xNnN5gOTdArSQBrlCur1vIsAaWUmttnAHAO+unC4AzyWYXNi2XJOKkHnWFE0RzeZdX2ZA5DgHxdH4/8EyfeUY5EPjR0mDbvawyqMERR4H6/k+FtDnpoTKEGFUnzVIqZLrQ62sffO1kOfeskh9P0PtSK9C8UufBo6QNQwyiZTNesCdE1v39IM9zfhao1GRlB2rC3EbEhBYIn/UN2Nun68rWFr53iRqyu1yxuTA/E2mjN1Q5qz/8zT8yI0uwCu9+ICEEZ/AA0hMq5wEyev4EPlFxyWvX9tuGf+BB81OKNl/X5bOF4Xvo3M1Obo765e+j5petRGPJsb9Aio4U0ayCOZ6u/nOBotr9uIfJWDa0mB7w/FGVTGGvQueIs8OTu80YVDbImoq33FegmajPMGJzwe4poi7vOTalG0fK4aQi0voh0V5l7Xv03cxUT0qE7CjncYa+XJ5h4SqDW/DuWFPn2f1etmDoKluPgADOpSEMueFKNoF5UQryeLl4/2xdFNVhlYnTea80pwH+E/ixpxxloOUbl0L2ZDa7xs/h7sTWhZu3a4j/pB00TC8Bf1jSAvLw2btD4IwssiFXQxW36zH/b4mwr21K6bKV6MSAj529etbSJLCk/S/rBR9T2E3GptIuJWsqBrLw24B7tIZ9e/V2dIYlG+DdsEk6DxiJdFOetU4DE2Re+v/5vpjKZ+yww0GyWs6CazCXzHZwTL5rRgnXe7kQKEkDW9NIlyiU8Mg7+Xp3/dz0RIIBA0sGNLgdeBbLDcZu4reMYx4QL7RVpADD0gzBb1n7RMcEbNUNLok2I0g0KHrysVx7UM5wufCdpvmwBHRL5Ettn4sD1yLytaCo7brJKOF31dtKy/ZMkgg0RjQ/cJZWi+f8ypBD+RD9pYvB5XW0QGwDUwdVs45SGzyMVQH5KOJrvCZIYjTcuG7wEy9A/s+fCieCPeLSwpIa5y1kd7Tcw33P5wK1H8JfFONlAnT/sJWf1YnGLKIoJaoX6GqMAdFt8tPU5bpS58E1esLuQ7etGp46a4Im69Ni8aDCcfCbMhuK+yVKPkS+7VC2jqYsgxnq2qxXOflj2bn3J9iONEkBUbNF8DkWjaqMyq/3dNf5uXepKbhYzRtAoAkqDoDd/JlztA0botqBxttMdvNwfadwplirkoSsSzgEqRuVBBdb+oD5LQuIycGskb7vA+7DNPYQ+TTs5eYmiNX0SmduCqXG3b8yci+nRPXVzAX/5GUmlBpgG4pGMIXTAKO7+Nt1Si7/QbLTa/JtxHm2fQUPlmmKE1fNRnXGJjNoYQlQovEhgA7Bz1YlKApWCJPw68zamMQyVQBQrbuJ8da7QZByscqE8RgHc1Gnac9aODSZk08so/sHMA4rv5qG2gnSlPOmxJLPy1Ozil/giiZhFpW0BU3ilaS8uSXqylT3iQRYNgsp2orJ6BfkNyOlEqPl/dXpj87z3mw==,iv:K5q8LTDsKVK6DKZPDGMbRiRK+tatjY48LMHNUcuxMfQ=,tag:dESi4+zGANHHLh+zHLoWuQ==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age: []
lastmodified: "2025-03-22T13:26:30Z"
mac: ENC[AES256_GCM,data:9pjOdnlatlraKYdXJ1p05e5GmzGZ9+MR6sXYBFWoeBC/XJO3/R4BJXaE1ox6Wcz3W11535RRBYykGNLGMM5Y8FmLCl4OBqHoxwbxA6Mvz/7VTyb9IIKvw7+KU09i1AZ2P+E7wzQfhGKGMuFuB7mMYGWDDFZpNSSlFj0sLMGKGcM=,iv:WvbSDjthkMF62xrKgy3oDZZsXqB7YXU591XVAF7LQ2o=,tag:z/g323lfi+P8aty0bDtrnA==,type:str]
pgp:
- created_at: "2025-03-22T13:26:30Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMA7uy4qQr71wiAQ//cuVt31J6vMCVjO+/G1ZmL8UsjpPceHqSmR1Wox0JVLFD
cmUrK4RJ/2Zf8zY9UUpZVhOSwJj9WuW/Jq0EpVH7Ba3ihzUog21/bAPjHFncLmhM
fvNyjujWmWtwYYoB5aV2J9kbgZ8A7jewXT7vtoGtkAvAsRzupr2ZpSx2BgHCduVT
Yp74ANuZFPqIb+/20rDNWdqY7XwQxLzCaZ267mHn3d/jPCv7mNnEWRgydwMCWkYI
XN3cHjgHvM05nydpHZYsDrwGOHs/DoCNM+P4vYbujSIKoGOHP+uG2j57Mv9WXtIU
/ARFc0IM1/dthc5MDqWQVY6HgnrXA9zyZmhoE+mdUHRxZQmjo6q0h1qY4bJwI42S
MEM79CcbdlqQczx1+GzHcbT0KQvA+khsWsO63j56eNnt5SLw9pCzrNylgW2JiAzC
0arjLYG3Mq4LaV32i1W2dx06EG091+7/+wlvdMhj7z0152+EX4O4udo0fRNE4Gcz
0h9/urIJokfAjF3CWAa+bGUm/LmZqu2uRAmgWMmCeoYRCzgIVD5oukYKeb3UykU7
foNw4QKH2wHnrjgmq4UCa11rvNkjdGnu+PCK6ZC2ePw1EjcA0SxwNeUm885KM2h/
D8nJHygpMn4qWiMvjqbPwXwGD0RmfP9DKBVFOVbnvJndJ6MJHd34C8xrUwbuoxbU
aAEJAhCpBrpztZL8OAbDHDcTNDM25RJ7ZqX8NpmQEmvXBSy3Yu0bXFIsNmd/qbg3
AB76XAOtbGMoq7VGenwuZqxLF6xH/G4eT1Uew3QsT6VFec1CD2RVtKsJ95+1aR9i
DxqXP46SGZIW
=AHr7
-----END PGP MESSAGE-----
fp: DC6910268E657FF70BA7EC289974494E76938DDC
encrypted_regex: ^(password|ssh-key|api-key|user|username|privateKey|apiKey|extraArgs.*|extraEnvVars|.*secret.*|key|.*Password|.*\.ya?ml)$
version: 3.9.4