feat: add config validation

This commit is contained in:
2026-02-20 05:33:50 +01:00
parent 7b29763230
commit 08c1d0c605
15 changed files with 3077 additions and 36 deletions

View File

@@ -5,9 +5,9 @@
import base64
import functools
import json
import pathlib
import sys
from typing import Annotated, Any, List, Literal
import git
import requests
@@ -15,6 +15,17 @@ import yaml
from jinja2 import Environment, FileSystemLoader, StrictUndefined, Template
from mergedeep import Strategy, merge
from netaddr import IPAddress
from pydantic import (
BaseModel,
BeforeValidator,
ConfigDict,
HttpUrl,
IPvAnyAddress,
ValidationInfo,
)
from pydantic_extra_types.semantic_version import SemanticVersion
from models import Model as TalosModel
REPO = git.Repo(sys.path[0], search_parent_directories=True)
assert REPO.working_dir is not None
@@ -39,6 +50,38 @@ TEMPLATES = Environment(
)
class ServerConfig(BaseModel):
    """Network endpoints of the provisioning server (netboot side)."""

    # Reject unknown keys and disable implicit coercion so config typos fail fast.
    model_config = ConfigDict(strict=True, extra="forbid")
    # Address the TFTP service listens on.
    tftpIp: IPvAnyAddress
    # Base URL clients use to fetch assets over HTTP.
    httpUrl: HttpUrl
class TailscaleConfig(BaseModel):
    """Settings for joining nodes to a Tailscale network."""

    # Reject unknown keys and disable implicit coercion so config typos fail fast.
    model_config = ConfigDict(strict=True, extra="forbid")
    # Coordination-server URL (e.g. a Headscale instance) — presumably; confirm against usage.
    loginServer: HttpUrl
    # Pre-authorized key used for non-interactive login.
    authKey: str
class Config(BaseModel):
    """Top-level validated configuration (merged config + secrets YAML)."""

    # Reject unknown keys and disable implicit coercion so config typos fail fast.
    model_config = ConfigDict(strict=True, extra="forbid")
    server: ServerConfig
    tailscale: TailscaleConfig
class Cluster(BaseModel):
    """A Talos/Kubernetes cluster that one or more nodes belong to."""

    # Reject unknown keys and disable implicit coercion so config typos fail fast.
    model_config = ConfigDict(strict=True, extra="forbid")
    name: str
    production: bool
    controlPlaneIp: IPvAnyAddress
    # TODO: model these two as pathlib.Path instead of raw strings
    secretsFile: str
    sopsKeyFile: str
# When we try to make a deep copy of the nodes dict it fails as the Template
# does not implement __deepcopy__, so this wrapper type facilitates that
class TemplateWrapper:
@@ -51,31 +94,68 @@ class TemplateWrapper:
return self
def render_templates(node: dict, args: dict):
    """Build a ``json.JSONEncoder`` subclass that resolves TemplateWrapper values.

    The returned class is intended to be passed as ``cls=`` to ``json.dumps``:
    whenever the encoder meets a TemplateWrapper it renders the wrapped
    template (with *args* plus the current *node*) and substitutes the
    YAML-parsed result; every other object falls through to the default
    encoder behaviour.
    """

    class _TemplateEncoder(json.JSONEncoder):
        def default(self, obj):
            # Non-template objects get the standard (TypeError-raising) handling.
            if not isinstance(obj, TemplateWrapper):
                return super().default(obj)
            try:
                text = obj.template.render(args | {"node": node})
            except Exception as e:
                e.add_note(f"While rendering for: {node['hostname']}")
                raise e
            # Parse the rendered yaml
            return yaml.safe_load(text)

    return _TemplateEncoder
def render_patch(wrapper: Any, info: ValidationInfo):
    """Pydantic ``BeforeValidator``: render a TemplateWrapper into parsed YAML.

    The template is rendered with the validation context (if any) merged with
    the previously-validated sibling fields (``info.data``) exposed as
    ``node``, then parsed with ``yaml.safe_load`` so the result can be
    validated as a TalosModel.

    Raises:
        RuntimeError: if *wrapper* is not a TemplateWrapper.
    """
    if not isinstance(wrapper, TemplateWrapper):
        raise RuntimeError("Expected TemplateWrapper")
    args = (info.context or {}) | {"node": info.data}
    try:
        rendered = wrapper.template.render(args)
    except Exception as e:
        # .get(): "hostname" is absent from info.data when its own validation
        # failed, and a KeyError raised here would mask the real error.
        e.add_note(f"While rendering for: {args['node'].get('hostname', '<unknown>')}")
        # Bare raise re-raises the in-flight exception with its original traceback.
        raise
    # Parse the rendered yaml
    return yaml.safe_load(rendered)
def tailscale_subnet(gateway: str, netmask: str):
    """Return the CIDR subnet ("a.b.c.d/bits") containing *gateway* under *netmask*."""
    mask = IPAddress(netmask)
    network = IPAddress(gateway) & mask
    return f"{network}/{mask.netmask_bits()}"
class Node(BaseModel):
    """One machine definition, loaded from a per-node YAML file.

    ``patches`` / ``patchesControlPlane`` entries arrive as TemplateWrapper
    objects; the ``render_patch`` BeforeValidator renders and YAML-parses
    them before TalosModel validation runs.
    """

    # Reject unknown keys and disable implicit coercion so config typos fail fast.
    model_config = ConfigDict(strict=True, extra="forbid")
    # Talos Image Factory schematic identifier.
    schematicId: str
    arch: Literal["amd64"]
    talosVersion: SemanticVersion
    kubernetesVersion: SemanticVersion
    kernelArgs: List[str]
    extraKernelArgs: List[str]
    # Nameserver addresses for the node.
    dns: List[IPvAnyAddress]
    # TODO: Validation
    ntp: str
    install: bool
    advertiseRoutes: bool
    # Hardware serial / NIC name — presumably used to match the machine; confirm against templates.
    serial: str
    interface: str
    # Static network configuration for the node.
    ip: IPvAnyAddress
    netmask: IPvAnyAddress
    gateway: IPvAnyAddress
    # TODO: Extra validation
    installDisk: str
    autoInstall: bool
    # Cluster this node belongs to (assumed identical across all its nodes).
    cluster: Cluster
    # Derived from the YAML file location, not from the file contents.
    hostname: str
    filename: str
    type: Literal["controlplane", "worker"]
    # Jinja templates rendered per-node into Talos machine-config patches.
    patches: List[Annotated[TalosModel, BeforeValidator(render_patch)]]
    patchesControlPlane: List[Annotated[TalosModel, BeforeValidator(render_patch)]]
def tailscale_subnet(gateway: IPvAnyAddress, netmask: IPvAnyAddress):
    """Return the CIDR subnet ("a.b.c.d/bits") containing *gateway* under *netmask*.

    Pydantic address objects are converted to netaddr via their canonical
    ``.exploded`` string form before the bitwise math.
    """
    mask = IPAddress(netmask.exploded)
    network = IPAddress(gateway.exploded) & mask
    return f"{network}/{mask.netmask_bits()}"
def load_secret(path: str) -> str:
    """Read the file at *path* and return its contents base64-encoded.

    The file is opened in binary mode: text mode would apply universal-newline
    translation and a UTF-8 decode/re-encode, silently corrupting CRLF or
    non-UTF-8 secret material (keys, tokens) before encoding.
    """
    with open(path, "rb") as f:
        return base64.b64encode(f.read()).decode()
def model_dump_json(model: BaseModel) -> str:
    """Template helper: serialize *model* to JSON, omitting ``None`` fields."""
    return model.model_dump_json(exclude_none=True)
@functools.cache
def get_schematic_id(schematic: str):
"""Lookup the schematic id associated with a given schematic"""
@@ -166,13 +246,19 @@ def main():
with open(ROOT.joinpath("secrets.yaml")) as fyaml:
merge(config, yaml.safe_load(fyaml), strategy=Strategy.TYPESAFE_REPLACE)
config = Config(**config)
template_args = {
"config": config,
"root": ROOT,
"helper": {"tailscale_subnet": tailscale_subnet, "load_secret": load_secret},
"helper": {
"tailscale_subnet": tailscale_subnet,
"load_secret": load_secret,
"model_dump_json": model_dump_json,
},
}
nodes = []
nodes: List[Node] = []
for fullname in walk_files(NODES):
filename = str(fullname.relative_to(NODES).parent) + "/" + fullname.stem
@@ -186,24 +272,15 @@ def main():
)
yml_data["hostname"] = fullname.stem
yml_data["filename"] = filename
nodes.append(yml_data)
# Quick and dirty way to resolve all the templates using a custom encoder
nodes = list(
map(
lambda node: json.loads(
json.dumps(node, cls=render_templates(node, template_args))
),
nodes,
)
)
node = Node.model_validate(yml_data, context=template_args)
nodes.append(node)
# HACK: We can't hash a dict, so we first convert it to json, the use set
# to get all the unique entries, and then convert it back
# NOTE: This assumes that all nodes in the cluster use the same definition for the cluster
clusters = list(
json.loads(cluster)
for cluster in set(json.dumps(node["cluster"]) for node in nodes)
Cluster.model_validate_json(cluster)
for cluster in set(node.cluster.model_dump_json() for node in nodes)
)
template_args |= {"nodes": nodes, "clusters": clusters}

15
tools/update_models Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Regenerate the pydantic models for the Talos v1.11 machine-config schema.
set -euo pipefail

ROOT=$(git rev-parse --show-toplevel)
MODELS_DIR="${ROOT}/src/models"

rm -rf "${MODELS_DIR}"

SCHEMA_FILE=$(mktemp schema.XXX.json)
function cleanup() {
    rm -rf "${SCHEMA_FILE}"
}
trap cleanup EXIT

# -f: fail on HTTP errors instead of saving the error page as the schema;
# -sS: quiet, but still print errors; -L: follow redirects.
curl -fsSL https://raw.githubusercontent.com/siderolabs/talos/refs/heads/release-1.11/website/content/v1.11/schemas/config.schema.json > "${SCHEMA_FILE}"

uvx --from datamodel-code-generator datamodel-codegen --input "${SCHEMA_FILE}" --input-file-type jsonschema --output "${MODELS_DIR}" --output-model pydantic_v2.BaseModel