WIP: caddy: very early role
parent
66e6c960d3
commit
d20c89defe
@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env ansible-playbook
---
# Apply the caddy role to the public reverse-proxy host.
- hosts:
    - proxy.pub.infra.auro.re
  roles:
    - caddy
...
|
@ -0,0 +1,174 @@
|
||||
import itertools
|
||||
import json
|
||||
from collections.abc import Iterable
|
||||
from typing import Any, Generic, Iterator, Literal, TypeVar
|
||||
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
Field,
|
||||
IPvAnyAddress,
|
||||
ValidationError,
|
||||
parse_obj_as,
|
||||
)
|
||||
|
||||
# Generic element type used by flatten() and AutoList below.
T = TypeVar("T")
|
||||
|
||||
|
||||
def flatten(iterable: Iterable[Iterable[T]]) -> Iterable[T]:
    """Yield every element of every inner iterable, in order."""
    for inner in iterable:
        yield from inner
|
||||
|
||||
|
||||
class AutoList(list[T], Generic[T]):
    """A list that also accepts a single bare value, wrapped into a list.

    Pydantic v1 custom-type hook: validation first tries ``list[T]`` and,
    on failure, falls back to treating the value as a lone element.
    """

    @classmethod
    def __get_validators__(cls):
        # pydantic v1 protocol: yield the validator callables in order.
        yield cls.__validator__

    @classmethod
    def __validator__(cls, value):
        try:
            return parse_obj_as(list[T], value)
        except ValidationError:
            # Not a valid list: wrap the single value instead.
            # NOTE(review): T is a bare TypeVar at runtime here, so pydantic
            # may effectively validate against Any rather than the concrete
            # type parameter — confirm element validation actually applies.
            return [parse_obj_as(T, value)]
|
||||
|
||||
|
||||
class BaseHandler(BaseModel):
    """Options shared by every concrete route handler.

    Attributes (documented with comments — pydantic model fields):
    """

    # Extra response headers to set (header name -> value).
    headers: dict[str, str] = {}
    # Optional path prefix the handler is mounted under; stripped before
    # the handler runs (see handler_to_caddy).
    path: str | None = None

    def to_caddy(self):
        """Yield Caddy route dicts for this handler; subclasses override.

        The original raised StopIteration here; under PEP 479 that
        escapes into the consuming generator's ``yield from`` as a
        RuntimeError.  NotImplementedError states the real contract:
        this base method must be overridden.
        """
        raise NotImplementedError
|
||||
|
||||
|
||||
class FilesHandler(BaseHandler):
    """Serve content rooted at a directory on disk."""

    # Filesystem directory used as the site root.
    root: str

    def to_caddy(self):
        """Yield a route that sets Caddy's ``root`` variable."""
        route = {"handler": "vars", "root": self.root}
        yield {"handle": [route]}
|
||||
|
||||
|
||||
class StaticHandler(BaseHandler):
    """Answer with a fixed response (status code and/or body)."""

    # HTTP status code to return, if any.
    status: int | None = None
    # Literal response body to return, if any.
    body: str | None = None

    def to_caddy(self):
        """Yield a single ``static_response`` route."""
        static = {"handler": "static_response"}
        # Emit only the keys that were explicitly configured.
        if self.status is not None:
            static["status_code"] = self.status
        if self.body is not None:
            static["body"] = self.body
        yield {"handle": [static]}
|
||||
|
||||
|
||||
class ReverseHandler(BaseHandler):
    """Reverse-proxy requests to one or more upstream addresses."""

    # Upstream dial addresses; a bare string is accepted via AutoList.
    reverse: AutoList[str]

    def to_caddy(self):
        """Yield a ``reverse_proxy`` route dialling every upstream."""
        upstreams = []
        for address in self.reverse:
            upstreams.append({"dial": address})
        proxy = {
            "handler": "reverse_proxy",
            "upstreams": upstreams,
        }
        yield {"handle": [proxy]}
|
||||
|
||||
|
||||
# Any concrete handler variant; pydantic tries them left to right.
Handler = FilesHandler | ReverseHandler | StaticHandler
# Host name -> handlers serving it (a bare handler is accepted via AutoList).
Routes = dict[str, AutoList[Handler]]
|
||||
|
||||
|
||||
class Server(BaseModel):
    # Bind addresses for this server; a bare string is accepted via AutoList.
    listen: AutoList[str]
    # Host name -> handlers; defaults to no routes.
    # NOTE(review): mutable default assumed safe because pydantic copies
    # field defaults per instance — confirm for the pydantic version in use.
    routes: Routes = {}
|
||||
|
||||
|
||||
# Top-level role configuration: server name -> Server definition.
Config = dict[str, Server]
|
||||
|
||||
|
||||
class Context:
    """Mutable per-conversion state handing out unique route-group ids."""

    def __init__(self) -> None:
        # Last group id handed out; ids start at 1.
        self._counter = 0

    def next_group(self) -> int:
        """Return the next unused group id (1, 2, 3, ...)."""
        self._counter = self._counter + 1
        return self._counter
|
||||
|
||||
|
||||
def strip_path_prefix(prefix: str) -> Any:
    """Build a Caddy ``rewrite`` route that drops *prefix* from the path."""
    # Key order matches the original literal so serialized output is stable.
    return dict(strip_path_prefix=prefix, handler="rewrite")
|
||||
|
||||
|
||||
def handler_to_caddy(handler: Handler, ctx: Context) -> Iterator[Any]:
    """Yield the Caddy route dicts implementing *handler*.

    Emits an optional header-setting route followed by the handler's own
    routes; when ``handler.path`` is set, everything is wrapped in a
    path-matched subroute that strips the prefix first.

    Note: this is a generator, so ``ctx.next_group()`` only runs when the
    caller consumes it — group numbering follows consumption order.
    """

    def to_caddy_inner():
        # One "headers" route per configured header, emitted before the
        # handler's own routes so the headers apply to its response.
        if handler.headers:
            yield {
                "handle": [
                    {
                        "handler": "headers",
                        "response": {"set": {name: [value]}},
                    }
                    for name, value in handler.headers.items()
                ]
            }
        yield from handler.to_caddy()

    if handler.path is None:
        # No mount path: emit the routes at the current level as-is.
        yield from to_caddy_inner()
    else:
        # Mounted under a path: match it, strip the prefix, then run the
        # inner routes inside a subroute.  A fresh group id is attached.
        # NOTE(review): assumed that distinct groups keep sibling routes
        # independent in Caddy's matcher — confirm against Caddy docs.
        yield {
            "group": f"group{ctx.next_group()}",
            "match": [{"path": [handler.path]}],
            "handle": [
                {
                    "handler": "subroute",
                    "routes": [
                        {"handle": [strip_path_prefix(handler.path)]},
                        *to_caddy_inner(),
                    ],
                }
            ],
        }
|
||||
|
||||
|
||||
def route_to_caddy(host: str, handlers: list[Handler], ctx: Context) -> Any:
    """Build the terminal, host-matched Caddy route for *host*."""
    # Expand every handler into its route dicts, preserving order.
    subroutes = []
    for handler in handlers:
        subroutes.extend(handler_to_caddy(handler, ctx))
    return {
        "match": [{"host": [host]}],
        "handle": [
            {
                "handler": "subroute",
                "routes": subroutes,
            }
        ],
        # Stop matching once this host's route has run.
        "terminal": True,
    }
|
||||
|
||||
|
||||
def server_to_caddy(server: Server, ctx: Context) -> Any:
    """Translate one validated Server model into a Caddy server object."""
    routes = []
    for host, handlers in server.routes.items():
        routes.append(route_to_caddy(host, handlers, ctx))
    return {
        "listen": server.listen,
        "errors": {},  # TODO
        "logs": {},  # TODO
        "routes": routes,
    }
|
||||
|
||||
|
||||
def to_caddy(config: Any) -> Any:
    """Validate *config* and render the full Caddy JSON structure."""
    parsed = parse_obj_as(Config, config)
    ctx = Context()
    servers = {}
    for name, server in parsed.items():
        servers[name] = server_to_caddy(server, ctx)
    return {"apps": {"http": {"servers": servers}}}
|
||||
|
||||
|
||||
class FilterModule:
    """Ansible filter-plugin entry point."""

    def filters(self):
        """Expose the Caddy config builder as a Jinja filter."""
        return dict(caddy__of_servers=to_caddy)
|
@ -0,0 +1,7 @@
|
||||
---
# Restart caddy after unit or configuration changes.
# NOTE(review): despite the "Reload" name this performs a full restart
# (state: restarted), dropping open connections — confirm intent.
- name: Reload caddy
  systemd:
    name: caddy.service
    daemon_reload: true
    state: restarted
...
|
@ -0,0 +1,40 @@
|
||||
---
- name: Install caddy
  apt:
    name: caddy

# systemd drop-in directory so the packaged unit can be overridden.
- name: Create override directory
  file:
    path: /etc/systemd/system/caddy.service.d
    state: directory
    owner: root
    group: root
    mode: u=rwx,g=rx,o=rx

# Point ExecStart/ExecReload at the JSON config instead of the Caddyfile.
- name: Override caddy.service
  template:
    src: override.conf.j2
    dest: /etc/systemd/system/caddy.service.d/override.conf
    owner: root
    group: root
    mode: u=rw,g=r,o=r
  notify:
    - Reload caddy

# Render the config through the caddy__of_servers filter plugin.
# NOTE(review): templates/caddy.json.j2 renders the same data with
# to_nice_json — confirm which of the two mechanisms is intended.
- name: Configure caddy
  copy:
    content: "{{ caddy__servers | caddy__of_servers | to_json }}"
    dest: /etc/caddy/caddy.json
    owner: root
    group: root
    mode: u=rw,g=r,o=r
  notify:
    - Reload caddy

- name: Enable caddy
  systemd:
    daemon_reload: true
    name: caddy.service
    enabled: true
    state: started
...
|
@ -0,0 +1 @@
|
||||
{# NOTE(review): apparently unused — tasks/main.yml writes /etc/caddy/caddy.json
   via copy + to_json instead; confirm before relying on this template. #}
{{ caddy__servers | caddy__of_servers | to_nice_json }}
|
@ -0,0 +1,7 @@
|
||||
{{ ansible_managed | comment }}

[Service]
# The empty assignments clear the packaged ExecStart/ExecReload before
# replacing them — systemd requires this to reset a list-type setting.
ExecStart=
ExecStart=/usr/bin/caddy run --environ --config /etc/caddy/caddy.json
ExecReload=
ExecReload=/usr/bin/caddy reload --config /etc/caddy/caddy.json --force
|
Loading…
Reference in New Issue