add reworked caddy configs from current setup

commit 76ec7d0505
Ruben, 2025-07-12 00:09:31 -05:00
23 changed files with 420 additions and 1 deletion

.old/etc/caddy/Caddyfile
@@ -0,0 +1,56 @@
# ╭────────────────────────────────────────────╮
# │ _ _ __ _ _ │
# │ ___ __ _ __| | __| |_ _ / _(_) | ___ │
# │ / __/ _` |/ _` |/ _` | | | | |_| | |/ _ \ │
# │ | (_| (_| | (_| | (_| | |_| | _| | | __/ │
# │ \___\__,_|\__,_|\__,_|\__, |_| |_|_|\___| │
# │ |___/ │
# ╰────────────────────────────────────────────╯
# synth.download's *current* caddyfile - will need to be entirely revamped once we actually get to the server migration. whenever that happens :p
# =============================================================================
# ╭────────────────────────────────╮
# │ commonly reused rules/snippets │
# ╰────────────────────────────────╯
# common-settings - globally enables settings that are commonly reused in every site block
(common-settings) {
    encode zstd gzip
}
# log - reused log settings
(log) {
    log {
        output file /var/log/caddy/{args[0]} {
            roll_size 10MB
            roll_keep 3
        }
    }
}
# robots-txt - link /robots.txt to the same file
(robots-txt) {
    handle_path /robots.txt {
        root * /var/www/robots.txt
        file_server
    }
}
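since the (log) snippet writes to /var/log/caddy/{args[0]}, each site's access log ends up in a file named after the argument passed to `import log`. a quick way to follow one, assuming caddy's default JSON access-log format and that jq is installed (the site name here is just an example):

# e.g. for a site that does `import log website`
tail -f /var/log/caddy/website | jq -r '[.status, .request.method, .request.uri] | @tsv'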
# =============================================================================
# ╭──────────────────────────╮
# │ main/general information │
# ╰──────────────────────────╯
{
    email synth@synth.download
    import log caddy
}
# ╭───────────────────────────────────────────────────────────────────────────╮
# │ do an nginx and separate snippets for sites into /etc/caddy/sites-enabled │
# ╰───────────────────────────────────────────────────────────────────────────╯
# to "disable" anything, just move the file to /etc/caddy/sites-disabled and reload caddy
import /etc/caddy/sites-enabled/*
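the enable/disable workflow described above boils down to something like this (the per-site filename is hypothetical since the individual file names aren't shown in this diff; reload however caddy is actually run, e.g. via systemd):

# "disable" a site, sanity-check the config, then reload
mv /etc/caddy/sites-enabled/somesite /etc/caddy/sites-disabled/
caddy validate --config /etc/caddy/Caddyfile
systemctl reload caddy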

@@ -0,0 +1,7 @@
upload.synth.download {
    reverse_proxy 127.0.0.1:31011
    import common-settings
    import log upload
    import robots-txt
}

@@ -0,0 +1,7 @@
tiktok.synth.download {
    reverse_proxy 127.0.0.1:64132
    import common-settings
    import log tiktok
    import robots-txt
}

@@ -0,0 +1,7 @@
tumblr.synth.download {
    reverse_proxy 127.0.0.1:56695
    import common-settings
    import log tumblr
    import robots-txt
}

@@ -0,0 +1,8 @@
adarkroom.synth.download {
    root * /var/www/adarkroom
    file_server
    import common-settings
    import log adarkroom
    import robots-txt
}

@@ -0,0 +1,10 @@
asking.synth.download {
    reverse_proxy 127.0.0.1:20617 {
        header_up X-Real-Ip {remote_host}
        header_up X-Http-Version {http.request.proto}
    }
    import common-settings
    import log ask-js
    import robots-txt
}

@@ -0,0 +1,28 @@
# generally, for xmpp, it's mostly a similar thing to the mailserver
# upload endpoint
upload.xmpp.synth.download {
    request_body {
        max_size 100m
    }
    handle /upload/* {
        @options method OPTIONS
        handle @options {
            header Access-Control-Allow-Origin "*"
            header Access-Control-Allow-Methods "PUT, GET, OPTIONS, HEAD"
            header Access-Control-Allow-Headers "Authorization, Content-Type"
            header Access-Control-Allow-Credentials "true"
            header Content-Length "0"
            header Content-Type "text/plain"
            respond 200
        }
        reverse_proxy 127.0.0.1:5050
    }
}
# other domains we just grab certificates for
xmpp.synth.download, muc.xmpp.synth.download, proxy.xmpp.synth.download, pubsub.xmpp.synth.download {
    respond "what if it was called gyattmpp instead"
}
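a rough way to poke the CORS preflight handling above from outside, assuming the upload component behind 127.0.0.1:5050 is running (the object path is made up):

# expect a 200 with the Access-Control-* headers set by caddy
curl -siX OPTIONS https://upload.xmpp.synth.download/upload/test.png | head -n 15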

@@ -0,0 +1,84 @@
# mastodon (chuckya) frontend
masto.beeping.synth.download, masto.booping.synth.download {
    root * /var/www/fedi-frontends/chuckya-fe
    file_server
    try_files {path} /
    handle_path /favicon.png {
        root * /var/www/website/assets/synth.download/synth.png
        file_server
        encode zstd gzip
    }
    handle_path /packs {
        root * /var/www/fedi-frontends/chuckya-fe/packs
        file_server
        encode zstd gzip
    }
    import common-settings
    import log mastofe
    import robots-txt
}
# akkoma fe for iceshrimp
akko.beeping.synth.download {
    # redirect api endpoints to iceshrimp
    @redir {
        path /oauth/*
        path /api/*
        path /nodeinfo/*
    }
    handle @redir {
        reverse_proxy 127.0.0.1:24042
        encode zstd gzip
    }
    # favicon
    handle_path /favicon.png {
        root * /var/www/website/assets/synth.download/synth.png
        file_server
        encode zstd gzip
    }
    # akkoma frontend
    handle {
        header Cache-Control "max-age=3600"
        try_files {path} /
        root * /var/www/fedi-frontends/akkoma-fe
        encode zstd gzip
        file_server
    }
    handle_path /static/* {
        header Cache-Control "max-age=3600"
        root * /var/www/fedi-frontends/akkoma-fe/static
        encode zstd gzip
        file_server
    }
    import common-settings
    import log akkofe
    import robots-txt
}
# phanpy
phanpy.beeping.synth.download, phanpy.booping.synth.download {
    root * /var/www/fedi-frontends/phanpy
    file_server
    try_files {path} /
    import common-settings
    import log phanpy
    import robots-txt
}
# pl-fe
pl-fe.beeping.synth.download, pl-fe.booping.synth.download {
    root * /var/www/fedi-frontends/pl-fe
    file_server
    try_files {path} /
    import common-settings
    import log pl-fe
    import robots-txt
}
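to sanity-check the @redir split in the akko block above: api-ish paths should be answered by the iceshrimp backend on 24042, everything else by the static akkoma-fe files. roughly (which nodeinfo version path actually exists depends on the backend, so treat the exact path as an assumption):

# should return JSON from the backend
curl -s https://akko.beeping.synth.download/nodeinfo/2.0 | head -c 200
# should return the frontend's index.html
curl -s https://akko.beeping.synth.download/ | head -c 200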

@@ -0,0 +1,10 @@
forged.synth.download {
    reverse_proxy 127.0.0.1:41807 {
        header_up X-Real-Ip {remote_host}
        header_up X-Http-Version {http.request.proto}
    }
    import common-settings
    import log forgejo
    import robots-txt
}

@@ -0,0 +1,7 @@
rss.synth.download {
    reverse_proxy 127.0.0.1:27819
    import common-settings
    import log freshrss
    import robots-txt
}

@@ -0,0 +1,14 @@
beeping.synth.download {
    reverse_proxy 127.0.0.1:24042
    # favicon stuff
    handle_path /assets/synth.download/synth.png {
        root * /var/www/website/assets/synth.download/synth.png
        file_server
        encode zstd gzip
    }
    import common-settings
    import log iceshrimp
    import robots-txt
}

@@ -0,0 +1,19 @@
# caddy won't actually handle mail (in terms of reverse proxying, etc); this block only exists so caddy can obtain certificates for us
# that get reused by the mailserver.
# a daily cronjob copies the certificates to the proper location, because by default caddy stores them somewhere
# that literally nobody except root and caddy can access. (a rough sketch of that cronjob follows after this file.)
mx1.synth.download {
    # Optional, can be useful for troubleshooting
    # connection to Caddy with correct certificate:
    respond "Hello DMS"
}
# mta-sts stuff
mta-sts.synth.download {
    root * /var/www/mta-sts
    file_server
    import common-settings
    import log mta-sts
    import robots-txt
}
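a rough sketch of the daily cert-copy cronjob mentioned above. the source path is where the debian/ubuntu caddy package usually keeps ACME material; the issuer directory and the destination are assumptions, not taken from this repo - adjust to wherever the mailserver actually reads its certs from.

#!/bin/sh
# hypothetical /etc/cron.daily/copy-mail-certs - all paths below are assumptions
SRC="/var/lib/caddy/.local/share/caddy/certificates/acme-v02.api.letsencrypt.org-directory/mx1.synth.download"
DST="/path/to/mailserver/certs"
install -m 600 "$SRC/mx1.synth.download.crt" "$DST/cert.pem"
install -m 600 "$SRC/mx1.synth.download.key" "$DST/key.pem"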

@@ -0,0 +1,53 @@
merping.synth.download {
    @local {
        file
        not path /
    }
    @local_media {
        path_regexp /system/(.*)
    }
    @streaming {
        path /api/v1/streaming
        path /api/v1/streaming/*
    }
    @cache_control {
        path_regexp ^/(emoji|packs|/system/accounts/avatars|/system/media_attachments/files)
    }
    root * /srv/docker/mastodon/public
    handle_errors {
        rewrite 500.html
        file_server
    }
    header {
        Strict-Transport-Security "max-age=31536000"
    }
    header /sw.js Cache-Control "public, max-age=0"
    header @cache_control Cache-Control "public, max-age=31536000, immutable"
    handle @local {
        file_server
    }
    handle @streaming {
        reverse_proxy 127.0.0.1:58834 {
            header_up Accept-Encoding identity
        }
    }
    reverse_proxy 127.0.0.1:46098 {
        header_up Accept-Encoding identity
        header_up X-Forwarded-Port 443
        transport http {
            keepalive 5s
            keepalive_idle_conns 10
        }
    }
    import common-settings
    import log mastodon
    import robots-txt
}
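a quick check that the @streaming matcher really routes to the streaming server on 58834 instead of the web process (upstream mastodon's streaming server exposes /api/v1/streaming/health; assuming the fork in use keeps it):

# expect a plain "OK" from the streaming server
curl -s https://merping.synth.download/api/v1/streaming/health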

@@ -0,0 +1,15 @@
*.pds.synth.download, pds.synth.download {
    root * /var/www/pds-dash
    file_server
    # since we utilize pds-dash to provide a proper "frontend" page for the pds, we need to manually proxy all known endpoints back to the pds
    # probably a better way to do this; haven't discovered it yet
    reverse_proxy /xrpc/* 127.0.0.1:24318
    reverse_proxy /oauth/* 127.0.0.1:24318
    reverse_proxy /.well-known/* 127.0.0.1:24318
    reverse_proxy /@atproto/* 127.0.0.1:24318
    import common-settings
    import log pds
    import robots-txt
}
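one way to confirm the manual /xrpc/* proxying while pds-dash keeps serving the root: the reference bluesky pds exposes an /xrpc/_health endpoint (assuming this deployment does too):

# JSON health blob from the pds itself
curl -s https://pds.synth.download/xrpc/_health
# static pds-dash page served by caddy
curl -s https://pds.synth.download/ | head -c 200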

@@ -0,0 +1,10 @@
reddit.synth.download {
    reverse_proxy 127.0.0.1:51617 {
        header_up X-Real-Ip {remote_host}
        header_up X-Http-Version {http.request.proto}
    }
    import common-settings
    import log redlib
    import robots-txt
}

@@ -0,0 +1,7 @@
booping.synth.download {
    reverse_proxy 127.0.0.1:60628
    import common-settings
    import log sharkey
    import robots-txt
}

@@ -0,0 +1,41 @@
synth.download {
    root * /var/www/website/_site
    file_server
    try_files {path} {path}.html {path}/ =404
    # kill
    redir /.git https://nbg1-speed.hetzner.com/10GB.bin
    redir /.git/ /.git
    # iceshrimp.net split domain
    redir /.well-known/webfinger https://beeping.synth.download{uri} 301
    redir /.well-known/host-meta https://beeping.synth.download{uri} 301
    redir /.well-known/nodeinfo https://beeping.synth.download{uri} 301
    # redirect pub to public directory
    redir /pub /pub/
    handle_path /pub/* {
        root * /var/www/pub
        file_server browse
    }
    # xmpp xep-0156
    handle /.well-known/host-meta {
        @options method OPTIONS
        handle @options {
            header Access-Control-Allow-Origin "*"
            header Content-Type "application/xrd+xml"
        }
    }
    handle /.well-known/host-meta.json {
        @options method OPTIONS
        handle @options {
            header Access-Control-Allow-Origin "*"
            header Content-Type "application/xrd+xml"
        }
    }
    import common-settings
    import log website
    import robots-txt
}
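to verify the split-domain redirects above, a webfinger lookup against the bare domain should 301 over to beeping.synth.download with the query string intact (the acct: value is only an example):

# expect a 301 with Location: https://beeping.synth.download/.well-known/webfinger?resource=...
curl -sI "https://synth.download/.well-known/webfinger?resource=acct:someone@synth.download" | grep -i -e '^HTTP' -e '^location'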

@@ -0,0 +1,16 @@
twitch.synth.download {
    reverse_proxy 127.0.0.1:24682
    import common-settings
    import log twitch
    import robots-txt
}
# required backend
b.twitch.synth.download {
    reverse_proxy 127.0.0.1:43072
    import common-settings
    import log twitch-backend
    import robots-txt
}

@@ -0,0 +1,9 @@
vault.synth.download {
    reverse_proxy 127.0.0.1:60838 {
        header_up X-Real-IP {remote_host}
    }
    import common-settings
    import log vaultwarden
    import robots-txt
}

@@ -0,0 +1,7 @@
auth.synth.download {
    reverse_proxy 127.0.0.1:19241
    import common-settings
    import log zitadel
    import robots-txt
}

@@ -1 +0,0 @@
# blank

@@ -563,6 +563,9 @@ function docker_cleanup {
    docker volume prune -af
    docker container prune -f
    docker network prune -f
    # ensure our ipv6 network still exists
    echo "${blue}docker-cleanup:${normal} Ensuring IPv6 network still exists..."
    docker network create --ipv6 --subnet fd00:cafe:face:feed::/64 ip6net
    echo "${green}Done. Beep!${normal}"
}
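since `docker network create` exits non-zero when ip6net survived the prune (e.g. containers are still attached to it), a slightly more defensive variant would check first - just a sketch, not what the script currently does:

# only (re)create the network if it's actually gone
if ! docker network inspect ip6net >/dev/null 2>&1; then
    docker network create --ipv6 --subnet fd00:cafe:face:feed::/64 ip6net
fi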

@@ -39,6 +39,8 @@ and as for other things:
  - personal notes/knowledge base while setting things up, may contain useful information for others.
- [`helperbot`](./helperbot)
  - our all-in-one helper script/utility for managing all of our vms. including vacuuming, cleanup, upgrading and backups. owo
- [`.old`](./.old)
  - configurations related to our older setup before migrating to our new servers/new configurations. kept for archival purposes, and may be useful to some.
feel free to look at them! we share our configs for everyone to use.