diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
index 449e696..25ce8b7 100644
--- a/.idea/inspectionProfiles/profiles_settings.xml
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -1,5 +1,6 @@
+
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index f1711ae..bd2452f 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
@@ -1,20 +1,54 @@
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -22,6 +56,13 @@
+
+
+
@@ -32,28 +73,54 @@
-
+
-
+ {
+ "customColor": "",
+ "associatedIndex": 1
+}
+
+
+
- {
+ "keyToString": {
+ "ModuleVcsDetector.initialDetectionPerformed": "true",
+ "Python.__init__.executor": "Run",
+ "Python.__main__.executor": "Run",
+ "Python.compose.executor": "Run",
+ "Python.dest_paths.executor": "Run",
+ "Python.env.executor": "Run",
+ "Python.env_data.executor": "Run",
+ "Python.env_row.executor": "Run",
+ "Python.models.executor": "Run",
+ "Python.networks.executor": "Run",
+ "Python.org.executor": "Run",
+ "Python.service.executor": "Run",
+ "Python.service_objs.executor": "Run",
+ "Python.src_paths.executor": "Run",
+ "Python.test.executor": "Run",
+ "RunOnceActivity.ShowReadmeOnStart": "true",
+ "RunOnceActivity.TerminalTabsStorage.copyFrom.TerminalArrangementManager.252": "true",
+ "RunOnceActivity.git.unshallow": "true",
+ "git-widget-placeholder": "main",
+ "run.code.analysis.last.selected.profile": "aDefault",
+ "settings.editor.selected.configurable": "preferences.pluginManager"
}
-}]]>
+}
+
+
+
+
+
+
+
+
+
@@ -68,8 +135,27 @@
1768351925652
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1768356921807
+
+
+
+ 1768356921807
+
+
@@ -83,4 +169,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index d650769..53f0b44 100755
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,8 +8,10 @@ requires-python = ">=3.13"
dependencies = [
"basedpyright>=1.37.1",
"loguru>=0.7.3",
+ "pydantic>=2.12.5",
"pyyaml>=6.0.3",
- "ruff>=0.14.11",
+ "ruff==0.14.13",
+ "sqlalchemy>=2.0.45",
]
[project.scripts]
@@ -19,6 +21,6 @@ docker_compose = "docker_compose:main"
requires = ["uv_build>=0.9.17,<0.10.0"]
build-backend = "uv_build"
-[tools.pyright]
-analysis.diagnosticMode = 'workspace'
-
+[tool.basedpyright]
+reportExplicitAny = "none"
+reportImportCycles = "none"
diff --git a/src/docker_compose/__init__.py b/src/docker_compose/__init__.py
index a144832..5bfd594 100644
--- a/src/docker_compose/__init__.py
+++ b/src/docker_compose/__init__.py
@@ -1,25 +1,33 @@
-from inspect import isclass
-from itertools import chain
from pathlib import Path
-from loguru import logger
-
ROOT = Path("/nas")
TEMPLATE_ROOT = ROOT.joinpath("docker_templates")
APP_ROOT = ROOT.joinpath("apps")
TRAEFIK_PATH = TEMPLATE_ROOT.joinpath("traefik")
-_ = logger.add("logs/app.log", level="DEBUG", rotation="1 second", retention="10")
+# ENGINE = create_engine("sqlite://", echo=True)
+
+#
+# _ = logger.add("logs/app.log", level="DEBUG", rotation="1 second", retention=10)
+#
+#
+# def log_cls(obj: type | object, **kwargs: str | int | bool):
+# logger.debug(
+# "\n\t".join(
+# chain(
+# (
+# f"created\n\tcls: {obj.__name__ if isclass(obj) else type(obj).__name__}",
+# ),
+# (f"{k}: {v}" for k, v in kwargs.items()),
+# )
+# )
+# )
+#
+#
+# def init_db():
+# SQLModel.metadata.create_all(ENGINE)
+#
-def log_cls(obj: type | object, **kwargs: str | int | bool):
- logger.debug(
- "\n\t".join(
- chain(
- (
- f"created\n\tcls: {obj.__name__ if isclass(obj) else type(obj).__name__}",
- ),
- (f"{k}: {v}" for k, v in kwargs.items()),
- )
- )
- )
+def fmt_replace_str(src: str) -> str:
+ return f"${{_{src.upper()}}}"
diff --git a/src/docker_compose/__main__.py b/src/docker_compose/__main__.py
index 09fcc96..bcc8ab3 100644
--- a/src/docker_compose/__main__.py
+++ b/src/docker_compose/__main__.py
@@ -1,23 +1,10 @@
-from collections.abc import Iterator
-
-# from typing import cast
-#
-# from docker_compose import TRAEFIK_PATH
-# from docker_compose.compose_data.net_yaml import NetArgsYaml
-from docker_compose.render.main import RenderByApp # , RenderByOrg
-
-# from docker_compose.util.Ts import TypeYamlCompatibleDict
-# from docker_compose.util.yaml_util import to_yaml
-
-
-def render_all() -> Iterator[str]:
- apps = RenderByApp.load_all()
- apps()
- return apps.proxy_nets
+from docker_compose.application.write_files import write_template
if __name__ == "__main__":
- _ = render_all()
+    write_template("gitea")
+ # init_db()
+ # _ = render_all()
# renderers = RenderByOrg.from_path(TRAEFIK_PATH)
# traefik =renderers["util"]
# data = traefik.template.compose_data.as_dict
diff --git a/src/docker_compose/env/__init__.py b/src/docker_compose/application/__init__.py
similarity index 100%
rename from src/docker_compose/env/__init__.py
rename to src/docker_compose/application/__init__.py
diff --git a/src/docker_compose/application/write_files.py b/src/docker_compose/application/write_files.py
new file mode 100644
index 0000000..28a0991
--- /dev/null
+++ b/src/docker_compose/application/write_files.py
@@ -0,0 +1,6 @@
+from docker_compose.domain.paths.src import SrcPaths
+
+
+def write_template(app: str):
+ data = SrcPaths.from_name(app)
+ data.compose()
\ No newline at end of file
diff --git a/src/docker_compose/compose_data/__init__.py b/src/docker_compose/compose_data/__init__.py
deleted file mode 100644
index 4f382ee..0000000
--- a/src/docker_compose/compose_data/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from docker_compose.util.replace import Replace
-
-DN = Replace.build_placeholder("dn", "org", "name")
-FQDN = Replace.build_placeholder("fqdn", "org", "name", "service")
diff --git a/src/docker_compose/compose_data/compose_yaml.py b/src/docker_compose/compose_data/compose_yaml.py
deleted file mode 100644
index 9f0d035..0000000
--- a/src/docker_compose/compose_data/compose_yaml.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from typing import TypedDict
-
-from docker_compose.compose_data.net_yaml import NetYaml
-from docker_compose.compose_data.services_yaml import ServiceYamlWrite
-from docker_compose.compose_data.volume_yaml import VolYaml
-
-
-class ComposeYaml(TypedDict):
- name: str
- services: dict[str, ServiceYamlWrite]
- networks: NetYaml | None
- volumes: dict[str, VolYaml] | None
diff --git a/src/docker_compose/compose_data/data.py b/src/docker_compose/compose_data/data.py
deleted file mode 100644
index ef38cdc..0000000
--- a/src/docker_compose/compose_data/data.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from collections.abc import Iterable, Iterator
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, cast, final, override
-
-from docker_compose import log_cls
-from docker_compose.compose_data import DN
-from docker_compose.compose_data.compose_yaml import ComposeYaml
-from docker_compose.compose_data.net import Net
-from docker_compose.compose_data.service import Service
-from docker_compose.compose_data.src_paths import SrcPaths
-from docker_compose.compose_data.volume_yaml import VolYaml
-from docker_compose.util.replace import Replace
-from docker_compose.util.yaml_util import read_yaml, to_yaml
-
-
-@final
-@dataclass(slots=True)
-class ComposeData:
- name: str
- services: dict[str, Service]
- networks: Net
- volumes: dict[str, VolYaml]
-
- def __post_init__(self):
- log_cls(ComposeData, name=self.name)
-
- @override
- def __str__(self) -> str:
- rep = Replace.format_src("name", self.name)
- return rep(to_yaml(self.as_dict)) # pyright: ignore[reportArgumentType]
-
- @staticmethod
- def get_services(paths: Iterable[Path]) -> Iterator[tuple[str, Service]]:
- for path in paths:
- service = Service.from_path(path)
- yield service.service_name, service
-
- @staticmethod
- def get_volumes(paths: Iterable[Path]) -> Iterator[tuple[str, VolYaml]]:
- for path in paths:
- yield path.stem, cast(VolYaml, cast(object, read_yaml(path)))
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- log_cls(ComposeData, path=str(path))
- return cls.from_src_paths(SrcPaths.from_path(path))
-
- @classmethod
- def from_src_paths(cls, src_paths: SrcPaths) -> Self:
- services = dict(cls.get_services(src_paths.service_files))
- return cls(
- src_paths.app_name,
- services,
- Net.from_service_list(services.values()),
- dict(cls.get_volumes(src_paths.volume_files)),
- )
-
- @property
- def as_dict(self) -> ComposeYaml:
- return ComposeYaml(
- name=DN.dest,
- services={
- service.service_name: service.as_dict
- for service in self.services.values()
- },
- networks=self.networks.as_dict,
- volumes=self.volumes,
- )
diff --git a/src/docker_compose/compose_data/dest_paths.py b/src/docker_compose/compose_data/dest_paths.py
deleted file mode 100644
index 47224e4..0000000
--- a/src/docker_compose/compose_data/dest_paths.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, final
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class DestPaths:
- compose_file: Path
- bind_vol_path: Path
-
- # def __post_init__(self):
- # log_cls(
- # self,
- # compose_file=str(self.compose_file),
- # bind_vol_path=str(self.bind_vol_path),
- # )
-
- @classmethod
- def from_path(cls, src: Path) -> Self:
- return cls(
- src.joinpath("docker-compose.yml"),
- src.joinpath("bind_vols.yml"),
- )
diff --git a/src/docker_compose/compose_data/main.py b/src/docker_compose/compose_data/main.py
deleted file mode 100644
index fe9746a..0000000
--- a/src/docker_compose/compose_data/main.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, final, override
-
-from docker_compose.compose_data.data import ComposeData
-from docker_compose.compose_data.dest_paths import DestPaths
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class Template:
- compose_data: ComposeData
- dest_path: DestPaths
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- return cls(
- ComposeData.from_path(path),
- DestPaths.from_path(path),
- )
-
- def __call__(self) -> None:
- with self.dest_path.compose_file.open("wt") as f:
- _ = f.write(str(self.compose_data))
-
- @override
- def __str__(self) -> str:
- return str(self.compose_data)
diff --git a/src/docker_compose/compose_data/net.py b/src/docker_compose/compose_data/net.py
deleted file mode 100644
index 7a1c8e4..0000000
--- a/src/docker_compose/compose_data/net.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from collections.abc import Iterable, Iterator
-from dataclasses import dataclass
-from typing import Self, final
-
-from docker_compose.compose_data.net_yaml import NetArgsYaml, NetYaml
-from docker_compose.compose_data.service import Service
-from docker_compose.util.replace import Replace
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class NetArgs:
- name: str
- full_name: str
- external: bool
-
- @classmethod
- def factory(cls, name: str):
- f = Replace.build_placeholder("_", "org", "name").dest
- return cls(
- name,
- f"{f}_{name}",
- name == "proxy",
- )
-
- @property
- def as_dict(self) -> NetArgsYaml:
- yaml_dict = NetArgsYaml(
- name=self.full_name,
- )
- if self.external:
- yaml_dict["external"] = self.external
- return yaml_dict
-
- # @property
- # def as_key_dict(self) -> tuple[str, NetArgsYaml]:
- # return str(self), self.as_dict
-
-
-@final
-@dataclass
-class Net:
- data: frozenset[NetArgs]
-
- def __iter__(self) -> Iterator[NetArgs]:
- yield from self.data
-
- @classmethod
- def from_service_list(cls, args: Iterable[Service]) -> Self:
- return cls.from_list(
- frozenset(net for service in args for net in service.networks)
- )
-
- @classmethod
- def from_list(cls, args: frozenset[str]) -> Self:
- return cls(frozenset(NetArgs.factory(arg) for arg in args))
-
- @property
- def as_dict(self) -> NetYaml:
- return {net.name: net.as_dict for net in self.data}
-
- @property
- def proxys(self) -> Iterator[str]:
- for net in self.data:
- if not net.external:
- continue
- yield net.full_name[:-6]
diff --git a/src/docker_compose/compose_data/net_yaml.py b/src/docker_compose/compose_data/net_yaml.py
deleted file mode 100644
index fe18fc3..0000000
--- a/src/docker_compose/compose_data/net_yaml.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from typing import NotRequired, TypedDict
-
-
-class NetArgsYaml(TypedDict):
- name: str
- external: NotRequired[bool]
-
-
-type NetYaml = dict[str, NetArgsYaml]
diff --git a/src/docker_compose/compose_data/service.py b/src/docker_compose/compose_data/service.py
deleted file mode 100644
index 551cf90..0000000
--- a/src/docker_compose/compose_data/service.py
+++ /dev/null
@@ -1,122 +0,0 @@
-from collections.abc import Callable, Iterator
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, final, override
-
-import yaml
-
-from docker_compose.compose_data import DN, FQDN
-from docker_compose.compose_data.services_yaml import (
- HealthCheck,
- ServiceYamlRead,
- ServiceYamlWrite,
-)
-from docker_compose.util.replace import Replace
-from docker_compose.util.Ts import T_Primitive, TypeYamlDict
-from docker_compose.util.yaml_util import validate_typed_dict
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class Service:
- _traefik_labels = frozenset(
- (
- f"traefik.http.routers.{DN.src}.rule=Host(`{Replace.fmt('url')}`)",
- f"traefik.http.routers.{DN.src}.entrypoints=websecure",
- f"traefik.docker.network={DN.src}_proxy",
- f"traefik.http.routers.{DN.src}.tls.certresolver=le",
- )
- )
- _sec_opts = frozenset(("no-new-privileges:true",))
-
- service_name: str
- command: tuple[str, ...]
- entrypoint: tuple[str, ...]
- environment: dict[str, T_Primitive]
- image: str
- labels: frozenset[str]
- logging: dict[str, str]
- networks: frozenset[str]
- restart: str
- security_opt: frozenset[str]
- user: str | None
- volumes: frozenset[str]
- shm_size: str | None
- depends_on: frozenset[str] | dict[str, dict[str, str]]
- healthcheck: HealthCheck | None
- ports: frozenset[str]
-
- @override
- def __hash__(self) -> int:
- return hash(self.service_name)
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- with path.open("rt") as f:
- return cls.from_txt(path.stem, f.read())
-
- @classmethod
- def from_txt(cls, name: str, data_str: str) -> Self:
- for func in cls.get_pre_render_funcs(name):
- data_str = func(data_str)
- data_dict: TypeYamlDict = yaml.safe_load(data_str) # pyright: ignore[reportAny]
- # if not isinstance(data_dict, MutableMapping):
- # raise TypeError
- data = validate_typed_dict(ServiceYamlRead, data_dict)
- return cls.from_dict(name, data) # pyright: ignore[reportArgumentType]
-
- @classmethod
- def from_dict(cls, name: str, data: ServiceYamlRead) -> Self:
- # helper = ServiceYamlProps(data)
- labels = frozenset(data.get("labels", ()))
- # ports = (f'"{p}"' for p in data.get("ports", ()))
- deps = data.get("depends_on", ())
- return cls(
- # service_val,
- name,
- # Replace.format_src_dest("service", name),
- tuple(data.get("command", ())),
- tuple(data.get("entrypoint", ())),
- data.get("environment", {}),
- data["image"],
- cls._traefik_labels.union(labels)
- if "traefik.enable=true" in labels
- else labels,
- data.get("logging", {}),
- frozenset(data.get("networks", ())),
- "unless-stopped",
- cls._sec_opts.union(data.get("security_opt", [])),
- data.get("user"),
- frozenset(data.get("volumes", ())),
- data.get("shm_size"),
- deps if isinstance(deps, dict) else frozenset(deps),
- data.get("healthcheck"),
- frozenset(data.get("ports", ())),
- )
-
- @classmethod
- def get_pre_render_funcs(cls, name: str) -> Iterator[Callable[[str], str]]:
- yield DN
- yield FQDN
- yield Replace.format_src_dest("service", name)
-
- @property
- def as_dict(self) -> ServiceYamlWrite:
- return ServiceYamlWrite(
- command=self.command,
- entrypoint=self.entrypoint,
- environment=self.environment,
- image=self.image,
- labels=self.labels,
- logging=self.logging,
- networks=self.networks,
- security_opt=self.security_opt,
- user=self.user,
- volumes=self.volumes,
- container_name=f"{DN.dest}_{self.service_name}",
- restart=self.restart,
- shm_size=self.shm_size,
- depends_on=self.depends_on,
- healthcheck=self.healthcheck,
- ports=self.ports,
- )
diff --git a/src/docker_compose/compose_data/services_yaml.py b/src/docker_compose/compose_data/services_yaml.py
deleted file mode 100644
index 161be56..0000000
--- a/src/docker_compose/compose_data/services_yaml.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import NotRequired, TypedDict
-
-from docker_compose.util.Ts import T_Primitive
-
-
-class HealthCheck(TypedDict):
- test: list[str] | str
- interval: NotRequired[str]
- timeout: NotRequired[str]
- retries: NotRequired[int]
- start_period: NotRequired[str]
-
-
-class ServiceYamlRead(TypedDict):
- command: NotRequired[list[str]]
- entrypoint: NotRequired[list[str]]
- environment: NotRequired[dict[str, T_Primitive]]
- image: str
- labels: NotRequired[list[str]]
- logging: NotRequired[dict[str, str]]
- networks: NotRequired[list[str]]
- security_opt: NotRequired[list[str]]
- user: NotRequired[str]
- volumes: NotRequired[list[str]]
- shm_size: NotRequired[str]
- depends_on: NotRequired[list[str]|dict[str,dict[str,str]]]
- healthcheck: NotRequired[HealthCheck]
- ports: NotRequired[list[str]]
-
-
-class ServiceYamlWrite(TypedDict):
- command: tuple[str, ...]
- entrypoint: tuple[str, ...]
- environment: dict[str, T_Primitive]
- image: str
- labels: frozenset[str]
- logging: dict[str, str]
- networks: frozenset[str]
- security_opt: frozenset[str]
- user: str | None
- volumes: frozenset[str]
- container_name: str
- restart: str
- shm_size: str | None
- depends_on: frozenset[str]|dict[str,dict[str,str]]
- healthcheck: HealthCheck | None
- ports: frozenset[str]
diff --git a/src/docker_compose/compose_data/src_paths.py b/src/docker_compose/compose_data/src_paths.py
deleted file mode 100644
index 8fdadf2..0000000
--- a/src/docker_compose/compose_data/src_paths.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from collections.abc import Iterator
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, final
-
-# class ComposeFileTemplate(Path):
-# def write_dict(self, data: TypeYamlCompatibleDict) -> None:
-# write_yaml(data, self)
-#
-# def write(self, data: str) -> None:
-# with self.open("wt") as f:
-# _ = f.write(data)
-#
-#
-# class OrgFile(Path):
-# @property
-# def as_dict(self) -> OrgYaml:
-# return cast(OrgYaml, cast(object, read_yaml(self)))
-
-
-# class YamlDir(Path):
-# @property
-# def yaml_files(self) -> Iterator[Path]:
-# if not self:
-# raise FileNotFoundError(self)
-# for service in self.iterdir():
-# if service.suffix not in YAML_EXTS:
-# continue
-# yield service
-#
-# def __bool__(self) -> bool:
-# return self.exists()
-
-#
-# class CfgDir(YamlDir):
-# @property
-# def cfg_file(self) -> OrgFile:
-# for file in self.yaml_files:
-# if file.stem != "cfg":
-# continue
-# return OrgFile(file)
-# raise FileNotFoundError(self.joinpath("cfg.y(a)ml"))
-#
-#
-# class ServiceDir(YamlDir):
-# @property
-# def files(self) -> Iterator[ServicePath]:
-# for file in self.yaml_files:
-# yield ServicePath(file)
-#
-#
-# class VolumesDir(YamlDir):
-# @property
-# def files(self) -> Iterator[VolumePath]:
-# try:
-# for file in self.yaml_files:
-# yield VolumePath(file)
-# except FileNotFoundError:
-# return
-
-
-# class VolumeData(Path):
-# def write(self, data: TypeYamlCompatibleRes) -> None:
-# write_yaml(data, self)
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class SrcPaths:
- YAML_EXTS = frozenset((".yml", ".yaml"))
-
- app_name: str
- service_files: frozenset[Path]
- volume_files: frozenset[Path]
-
- @classmethod
- def from_path(cls, src: Path) -> Self:
- return cls(
- src.stem,
- frozenset(cls.get_yaml_files(src.joinpath("services"))),
- frozenset(cls.get_yaml_files(src.joinpath("volumes"))),
- )
-
- @classmethod
- def get_yaml_files(cls, path: Path) -> Iterator[Path]:
- for service in path.iterdir():
- if service.suffix not in cls.YAML_EXTS:
- continue
- yield service
-
diff --git a/src/docker_compose/compose_data/volume_yaml.py b/src/docker_compose/compose_data/volume_yaml.py
deleted file mode 100644
index 4872dbb..0000000
--- a/src/docker_compose/compose_data/volume_yaml.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from docker_compose.util.Ts import TypeYamlDict
-
-type VolYaml = dict[str, TypeYamlDict]
diff --git a/src/docker_compose/org/__init__.py b/src/docker_compose/domain/__init__.py
similarity index 100%
rename from src/docker_compose/org/__init__.py
rename to src/docker_compose/domain/__init__.py
diff --git a/src/docker_compose/render/__init__.py b/src/docker_compose/domain/compose/__init__.py
similarity index 100%
rename from src/docker_compose/render/__init__.py
rename to src/docker_compose/domain/compose/__init__.py
diff --git a/src/docker_compose/domain/compose/compose.py b/src/docker_compose/domain/compose/compose.py
new file mode 100644
index 0000000..1f19421
--- /dev/null
+++ b/src/docker_compose/domain/compose/compose.py
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+from collections import ChainMap
+from collections.abc import Generator, MutableMapping
+from typing import TYPE_CHECKING, Any, final, override
+
+import yaml
+from pydantic import Field, RootModel, computed_field, model_serializer
+from pydantic.dataclasses import dataclass
+
+from docker_compose.domain.compose.service.service import Service
+from docker_compose.domain.compose.volume_files import VolumeFile
+
+if TYPE_CHECKING:
+ from docker_compose.domain.paths.src import SrcPaths
+
+
+@final
+@dataclass(slots=True)
+class Compose:
+ src_paths: SrcPaths
+ name: str = Field(init=False)
+ services: tuple[Service, ...] = Field(init=False)
+ volumes: tuple[VolumeFile, ...] = Field(init=False)
+
+ def __post_init__(self):
+ self.name = self.src_paths.path.stem
+ self.services = tuple(self.service_files)
+ self.volumes = tuple(self.volume_files)
+
+ @property
+ def service_files(self):
+ for path in self.src_paths.service_files:
+ yield Service.from_path(self, path)
+
+ @property
+ def volume_files(self):
+ for path in self.src_paths.volume_files:
+ yield VolumeFile.from_path(path)
+
+ @property
+ def networks_sub(self) -> Generator[dict[str, Any]]:
+ for service in self.services:
+ for network in service.networks:
+ yield network.as_dict(False)
+
+ @computed_field
+ @property
+ def networks(self) -> MutableMapping[str, Any]:
+ return ChainMap(*self.networks_sub)
+
+ # @classmethod
+ # def from_path(cls, path: Path) -> Self:
+ # src_paths = SrcPaths(path)
+ # return cls(
+ # path.stem,
+ # tuple(map(Service.from_path, src_paths.service_files)),
+ # tuple(map(VolumeFile.from_path, src_paths.volume_files)),
+ # )
+
+ @property
+ def as_dict(self) -> dict[Any, Any]:
+ return RootModel[Compose](self).model_dump(exclude_none=True) # pyright: ignore[reportAny]
+
+ @model_serializer(mode="plain")
+ def dump(self) -> dict[str, Any]:
+ return {self.name: ChainMap(*(s.as_dict for s in self.services))}
+
+ @override
+ def __str__(self) -> str:
+ return yaml.dump(self.as_dict)
+
+ def __call__(self):
+ with self.src_paths.compose_file.open("wt") as f:
+ _ = f.write(str(self))
diff --git a/src/docker_compose/domain/compose/service/__init__.py b/src/docker_compose/domain/compose/service/__init__.py
new file mode 100644
index 0000000..c9b2f9b
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/__init__.py
@@ -0,0 +1,15 @@
+from docker_compose import APP_ROOT, fmt_replace_str
+from docker_compose.util import ReplaceStr
+
+DN = ReplaceStr(
+ fmt_replace_str("dn"),
+ "_".join(fmt_replace_str(s) for s in ("org", "name")),
+)
+FQDN = ReplaceStr(
+ fmt_replace_str("fqdn"),
+    "_".join(fmt_replace_str(s) for s in ("org", "name", "service")),
+)
+DATA_DN = ReplaceStr(
+ fmt_replace_str("data"),
+ str(APP_ROOT.joinpath(*(fmt_replace_str(s) for s in ("org", "name")))),
+)
diff --git a/src/docker_compose/domain/compose/service/env.py b/src/docker_compose/domain/compose/service/env.py
new file mode 100644
index 0000000..85c1282
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/env.py
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, cast, final
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ SerializerFunctionWrapHandler,
+ field_serializer,
+ model_serializer,
+)
+from pydantic.dataclasses import dataclass
+
+if TYPE_CHECKING:
+ from docker_compose.domain.compose.service.service import Service
+
+
+@final
+@dataclass(slots=True, config=ConfigDict(str_strip_whitespace=True))
+class Environment:
+ sep = "="
+ key: str
+ val: str
+ service: Service = Field(init=False)
+
+ @field_serializer("val", mode="plain")
+ def get_val(self, v: str) -> str:
+ return self.service(v)
+
+ # @field_validator("key", "val", mode="after")
+ # @classmethod
+ # def val_dump(cls, val: str) -> str:
+ # return val.strip()
+
+ @model_serializer(mode="wrap")
+ def model_serial(
+ self,
+ handler: SerializerFunctionWrapHandler,
+ # info: SerializationInfo,
+ ) -> str:
+ data = cast(dict[str, Any], handler(self))
+ return f"{data['key']}{self.sep}{data['val']}"
diff --git a/src/docker_compose/domain/compose/service/health_check.py b/src/docker_compose/domain/compose/service/health_check.py
new file mode 100644
index 0000000..14f0589
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/health_check.py
@@ -0,0 +1,26 @@
+from typing import final
+
+from pydantic import ConfigDict, field_validator
+from pydantic.dataclasses import dataclass
+
+
+@final
+@dataclass(slots=True, config=ConfigDict(str_strip_whitespace=True))
+class HealthCheck:
+ test: tuple[str, ...]
+ interval: str | None
+ timeout: str | None
+ retries: int | None
+ start_period: str | None
+
+ @field_validator("test", mode="after")
+ @classmethod
+ def test_validator(cls, v: tuple[str, ...]) -> tuple[str, ...]:
+ return tuple(s.strip() for s in v)
+
+ # @field_validator("interval", "timeout", "start_period", mode="after")
+ # @classmethod
+ # def string_validator(cls, v: str | None) -> str | None:
+ # if not v:
+ # return
+ # return v.strip()
diff --git a/src/docker_compose/domain/compose/service/networks.py b/src/docker_compose/domain/compose/service/networks.py
new file mode 100644
index 0000000..16f556a
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/networks.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, cast, final
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ RootModel,
+ SerializationInfo,
+ SerializerFunctionWrapHandler,
+ model_serializer,
+)
+from pydantic.dataclasses import dataclass
+
+from docker_compose.domain.compose.service import DN
+
+if TYPE_CHECKING:
+ from docker_compose.domain.compose.service.service import Service
+
+
+@final
+@dataclass(slots=True, config=ConfigDict(str_strip_whitespace=True))
+class Network:
+ val: str
+ service: Service = Field(init=False)
+
+ @property
+ def name(self):
+ return f"{DN.repl}_{self.val}"
+
+ @property
+ def external(self):
+ return "proxy" in self.val
+
+ @model_serializer(mode="wrap")
+ def serialize_model(
+ self,
+ handler: SerializerFunctionWrapHandler,
+ info: SerializationInfo,
+ ) -> str | dict[str, Any]:
+ context = cast(dict[str, Any] | None, info.context)
+ data = cast(dict[str, Any], handler(self))
+ if context:
+ context = cast(bool, context.get("full"))
+ if context is None:
+ return cast(str, data["val"])
+ if not data["external"] or context:
+ data.pop("external", None)
+ return {data.pop("val"): data}
+
+ def as_dict(self, context: bool = False) -> dict[str, Any]:
+ return RootModel[Network](self).model_dump(context={"full": context}) # pyright: ignore[reportAny]
diff --git a/src/docker_compose/domain/compose/service/port.py b/src/docker_compose/domain/compose/service/port.py
new file mode 100644
index 0000000..394b7a3
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/port.py
@@ -0,0 +1,21 @@
+from typing import Self, final
+
+from pydantic import model_serializer
+from pydantic.dataclasses import dataclass
+
+
+@final
+@dataclass(slots=True)
+class Port:
+ sep = ":"
+
+ src: int
+ dest: int
+
+ @classmethod
+ def from_string(cls, string: str) -> Self:
+ return cls(*(int(s) for s in string.split(cls.sep)))
+
+ @model_serializer(mode="plain")
+ def serialize_model(self) -> str:
+ return f"{self.src}{self.sep}{self.dest}"
diff --git a/src/docker_compose/domain/compose/service/service.py b/src/docker_compose/domain/compose/service/service.py
new file mode 100644
index 0000000..d535797
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/service.py
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+from collections.abc import Generator, Iterable
+from dataclasses import InitVar
+from functools import reduce
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Self, cast, final
+
+import yaml
+from pydantic import (
+ Field,
+ RootModel,
+ SerializerFunctionWrapHandler,
+ computed_field,
+ field_validator,
+ model_serializer,
+)
+from pydantic.dataclasses import dataclass
+
+from docker_compose import fmt_replace_str
+from docker_compose.domain.compose.service import DN, FQDN
+from docker_compose.domain.compose.service.env import Environment
+from docker_compose.domain.compose.service.health_check import HealthCheck
+from docker_compose.domain.compose.service.networks import Network
+from docker_compose.domain.compose.service.port import Port
+from docker_compose.domain.compose.service.volumes import Volumes
+from docker_compose.util import ReplaceStr
+
+if TYPE_CHECKING:
+ from docker_compose.domain.compose.compose import Compose
+
+
+@final
+@dataclass(slots=True)
+class Service:
+ _traefik_labels = (
+ "traefik.enable=true",
+ f"traefik.http.routers.{DN.repl}.rule=Host(`{fmt_replace_str('url')}`)",
+ f"traefik.http.routers.{DN.repl}.entrypoints=websecure",
+ f"traefik.docker.network={DN.repl}_proxy",
+ f"traefik.http.routers.{DN.repl}.tls.certresolver=le",
+ )
+ _sec_opts = ("no-new-privileges:true",)
+
+ compose: Compose
+ path: InitVar[Path]
+
+ image: str
+ service_name: str = Field(init=False, exclude=True)
+ user: str | None = Field(default=None)
+ shm_size: str | None = Field(default=None)
+ restart: str = Field(default="unless-stopped")
+ depends_on: dict[str, dict[str, str]] = Field(
+ default_factory=dict,
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+
+ command: tuple[str, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ entrypoint: tuple[str, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ environment: tuple[Environment, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ labels_raw: tuple[str, ...] = Field(
+ default=(),
+ exclude=True,
+ )
+ logging: tuple[str, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ networks: tuple[Network, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ security_opt: tuple[str, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ volumes: tuple[Volumes, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ ports: tuple[Port, ...] = Field(
+ default=(),
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+ healthcheck: HealthCheck | None = Field(
+ default=None,
+ exclude_if=lambda v: not v, # pyright: ignore[reportAny]
+ )
+
+ def __post_init__(self, path: Path) -> None:
+ self.service_name = path.stem
+
+ @computed_field
+ @property
+ def container_name(self):
+ return f"{DN.repl}_{self.service_name}"
+
+ def __iter__(self) -> Generator[ReplaceStr]:
+ yield FQDN
+ yield DN
+ yield ReplaceStr("application", self.service_name)
+
+ def __call__(self, data: str) -> str:
+ return reduce(lambda s, f: f(s), self, data)
+
+ @computed_field
+ @property
+ def labels(self):
+ if "traefik.enable=true" not in self.labels_raw:
+ return self.labels_raw
+ return self.labels_raw + self._traefik_labels
+
+ @field_validator(
+ "command",
+ "entrypoint",
+ "labels_raw",
+ "logging",
+ "security_opt",
+ mode="after",
+ )
+ @classmethod
+ def string_lists(cls, data: Iterable[str]) -> tuple[str, ...]:
+ return tuple(s.strip() for s in data)
+
+ @classmethod
+ def from_path(cls, compose: Compose, path: Path) -> Self:
+ with path.open("rt") as f:
+ data = cast(dict[str, Any], yaml.safe_load(f))
+ return cls(compose, path, **data) # pyright: ignore[reportAny]
+
+ @model_serializer(mode="wrap")
+ def dump(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]:
+ data = cast(dict[str, Any], handler(self))
+ return {data.pop("container_name"): data}
+
+ @property
+ def as_dict(self) -> dict[str, Any]:
+ return RootModel[Service](self).model_dump(exclude_none=True) # pyright: ignore[reportAny]
diff --git a/src/docker_compose/domain/compose/service/volumes.py b/src/docker_compose/domain/compose/service/volumes.py
new file mode 100644
index 0000000..b8c6a0e
--- /dev/null
+++ b/src/docker_compose/domain/compose/service/volumes.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Self, final
+
+from pydantic import ConfigDict, model_serializer
+from pydantic.dataclasses import dataclass
+
+if TYPE_CHECKING:
+ from docker_compose.domain.compose.service.service import Service
+
+
+@final
+@dataclass(slots=True, config=ConfigDict(str_strip_whitespace=True))
+class Volumes:
+ sep = ":"
+
+ service: Service
+
+ _src: str
+ dest: str
+
+ @classmethod
+ def from_str(cls, service: Service, src: str) -> Self:
+ return cls(service, *src.split(cls.sep, 2))
+
+ @property
+ def src(self) -> str:
+ return self.service(self._src)
+
+ @model_serializer(mode="plain")
+ def serialize_model(self) -> str:
+ return f"{self.src}{self.sep}{self.dest}"
diff --git a/src/docker_compose/domain/compose/volume_files.py b/src/docker_compose/domain/compose/volume_files.py
new file mode 100644
index 0000000..dfd7e29
--- /dev/null
+++ b/src/docker_compose/domain/compose/volume_files.py
@@ -0,0 +1,31 @@
+from dataclasses import InitVar
+from pathlib import Path
+from typing import Any, Self, cast, final
+
+import yaml
+from pydantic import ConfigDict, Field
+from pydantic.dataclasses import dataclass
+
+
+@final
+@dataclass(slots=True, config=ConfigDict(str_strip_whitespace=True))
+class VolumeFile:
+ path: InitVar[Path]
+ name: str = Field(init=False)
+
+ data: dict[str, Any]
+
+ def __post_init__(self, path: Path) -> None:
+ self.name = path.stem
+
+ @classmethod
+ def from_path(cls, path: Path) -> Self:
+ with path.open("rt") as f:
+ data = cast(dict[str, Any], yaml.safe_load(f))
+ return cls(path, data)
+
+ #
+ # @field_validator("name", mode="after")
+ # @classmethod
+ # def name_validate(cls, s: str) -> str:
+ # return s.strip()
diff --git a/src/docker_compose/domain/env/__init__.py b/src/docker_compose/domain/env/__init__.py
new file mode 100644
index 0000000..473a0f4
diff --git a/src/docker_compose/domain/env/env_data.py b/src/docker_compose/domain/env/env_data.py
new file mode 100644
index 0000000..add7dff
--- /dev/null
+++ b/src/docker_compose/domain/env/env_data.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from collections.abc import Generator
+from typing import TYPE_CHECKING, Any, cast, final
+
+from pydantic import Field, model_serializer
+from pydantic.dataclasses import dataclass
+from pydantic_core.core_schema import SerializerFunctionWrapHandler
+
+from docker_compose.domain.env.env_row import EnvRow
+
+if TYPE_CHECKING:
+ from docker_compose.domain.paths.src import SrcPaths
+
+
+@final
+@dataclass(slots=True)
+class EnvData:
+ src_paths: SrcPaths
+ data: tuple[EnvRow, ...] = Field(init=False)
+
+ def __post_init__(self):
+ self.data = tuple(self.lines)
+
+ @property
+ def lines(self) -> Generator[EnvRow]:
+ with self.src_paths.env_file.open(mode="rt") as f:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ yield EnvRow.from_str(self, line)
+
+ @model_serializer(mode="wrap")
+ def serialize_model(self, handler: SerializerFunctionWrapHandler) -> list[str]:
+ return cast(dict[str, Any], handler(self))["data"] # pyright: ignore[reportAny]
diff --git a/src/docker_compose/domain/env/env_row.py b/src/docker_compose/domain/env/env_row.py
new file mode 100644
index 0000000..9be41f6
--- /dev/null
+++ b/src/docker_compose/domain/env/env_row.py
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+import secrets
+from typing import Self, final
+
+from pydantic import ConfigDict, computed_field, field_validator, model_serializer
+from pydantic.dataclasses import dataclass
+
+from docker_compose.domain.env.env_data import EnvData
+
+# if TYPE_CHECKING:
+# from docker_compose.env.env_data import EnvData
+
+
+@final
+@dataclass(slots=True, frozen=True, config=ConfigDict(str_strip_whitespace=True))
+class EnvRow:
+ parent: EnvData
+ key: str
+ _val: str
+
+ @classmethod
+ def from_str(cls, parent: EnvData, raw: str) -> Self:
+ return cls(parent, *raw.split("="))
+
+ @field_validator("key", mode="after")
+ @classmethod
+ def strip_string(cls, s: str) -> str:
+ if s.startswith("#"):
+ raise ValueError
+ return s
+
+ @model_serializer(mode="plain")
+ def model_serializer(self) -> str:
+ return f"{self.key}={self.val}"
+
+ @computed_field
+ @property
+ def val(self) -> str:
+ return self._val.replace("{_PSWD}", secrets.token_urlsafe(12))
diff --git a/src/docker_compose/domain/paths/__init__.py b/src/docker_compose/domain/paths/__init__.py
new file mode 100644
index 0000000..473a0f4
diff --git a/src/docker_compose/domain/paths/dest.py b/src/docker_compose/domain/paths/dest.py
new file mode 100644
index 0000000..e9c4e2d
--- /dev/null
+++ b/src/docker_compose/domain/paths/dest.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING, final
+
+from pydantic.dataclasses import dataclass
+
+from docker_compose import APP_ROOT
+
+if TYPE_CHECKING:
+ from docker_compose.domain.paths.org import OrgData
+
+
+@final
+@dataclass
+class DestPath:
+ org_data: OrgData
+
+ # @classmethod
+ # def from_path(cls, path: Path) -> Iterator[Self]:
+ # return map(cls, OrgData.from_path(path))
+
+ @property
+ def base_path(self):
+ return APP_ROOT.joinpath(*self.org_data)
+
+ @property
+ def compose_path(self) -> Path:
+ return self.base_path.joinpath("docker-compose.yml")
+
+ @property
+ def env_path(self) -> Path:
+ return self.base_path.joinpath(".env")
diff --git a/src/docker_compose/domain/paths/org.py b/src/docker_compose/domain/paths/org.py
new file mode 100644
index 0000000..8a9c2dd
--- /dev/null
+++ b/src/docker_compose/domain/paths/org.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from collections.abc import Generator, Iterator
+from enum import StrEnum
+from functools import reduce
+from typing import TYPE_CHECKING, Self, cast, final
+
+import yaml
+from pydantic import ConfigDict, Field, field_validator
+from pydantic.dataclasses import dataclass
+
+from docker_compose import fmt_replace_str
+from docker_compose.domain.paths.dest import DestPath
+from docker_compose.domain.render.render import Render
+from docker_compose.util import ReplaceStr
+
+if TYPE_CHECKING:
+ from docker_compose.domain.paths.src import SrcPaths
+
+
+class Orgs(StrEnum):
+ PERSONAL = "personal"
+ STRYTEN = "stryten"
+ C4 = "c4"
+
+
+@final
+@dataclass(
+ slots=True,
+ order=True,
+ config=ConfigDict(use_enum_values=True, str_strip_whitespace=True),
+)
+class OrgData:
+ src_paths: SrcPaths
+ app: str
+ org: Orgs
+ url: str | None
+ dest: DestPath = Field(init=False)
+ render: Render = Field(init=False)
+
+ def __post_init__(self):
+ self.dest = DestPath(self)
+ self.render = Render(self)
+
+ def __call__(self, string: str) -> str:
+ return reduce(lambda s, f: f(s), self.replace_funcs, string)
+
+ def __iter__(self):
+ yield self.app
+ yield self.org
+
+ @property
+ def _replace_args(self) -> Generator[tuple[str, str]]:
+ yield "app", self.app
+ yield "org", self.org
+ yield "url", ".".join((self.url, "ccamper7", "net")) if self.url else ""
+
+ @property
+ def replace_funcs(self) -> Generator[ReplaceStr]:
+ for s, r in self._replace_args:
+ yield ReplaceStr(fmt_replace_str(s), r)
+
+ @field_validator("app", "org", mode="after")
+ @classmethod
+ def strip(cls, v: str) -> str:
+ return v.strip()
+
+ @field_validator("url", mode="before")
+ @classmethod
+ def strip_url(cls, v: str) -> str:
+ if not v:
+ return v
+ return v.strip()
+
+ @classmethod
+ def from_src_path(cls, path: SrcPaths) -> Iterator[Self]:
+ # log_cls(cls, path=str(path))
+ with path.cfg_file.open("rt") as f:
+ data = cast(dict[str, dict[str, str]], yaml.safe_load(f))
+ app = path.cfg_file.stem
+ for org, _dict in data.items():
+ yield cls(app=app, org=org, **_dict) # pyright: ignore[reportArgumentType]
diff --git a/src/docker_compose/domain/paths/src.py b/src/docker_compose/domain/paths/src.py
new file mode 100644
index 0000000..9073539
--- /dev/null
+++ b/src/docker_compose/domain/paths/src.py
@@ -0,0 +1,61 @@
+from collections.abc import Iterator
+from pathlib import Path
+from typing import Self, final
+
+from pydantic import Field
+from pydantic.dataclasses import dataclass
+
+from docker_compose import TEMPLATE_ROOT
+from docker_compose.domain.compose.compose import Compose
+from docker_compose.domain.env.env_data import EnvData
+from docker_compose.domain.paths.org import OrgData, Orgs
+
+
+@final
+@dataclass(slots=True)
+class SrcPaths:
+ YAML_EXTS = frozenset((".yml", ".yaml"))
+
+ path: Path
+ compose: Compose = Field(init=False)
+ cfg: dict[Orgs, OrgData] = Field(init=False)
+ env: EnvData = Field(init=False)
+
+ def __post_init__(self):
+ self.compose = Compose(self)
+ self.cfg = {obj.org: obj for obj in OrgData.from_src_path(self)}
+ self.env = EnvData(self)
+
+ @property
+ def compose_file(self):
+ return self.path.joinpath("docker-compose.yml")
+
+ @property
+ def bind_vol_path(self):
+ return self.path.joinpath("bind_volumes.yml")
+
+ @property
+ def service_files(self) -> Iterator[Path]:
+ yield from self.get_yaml_files("services")
+
+ @property
+ def volume_files(self) -> Iterator[Path]:
+ yield from self.get_yaml_files("volumes")
+
+ @property
+ def cfg_file(self):
+ return self.path.joinpath("cfg.yml")
+
+ @property
+ def env_file(self) -> Path:
+ return self.path.joinpath(".env")
+
+ def get_yaml_files(self, folder: str) -> Iterator[Path]:
+ for service in self.path.joinpath(folder).iterdir():
+ if service.suffix not in self.YAML_EXTS:
+ continue
+ yield service
+
+ @classmethod
+ def from_name(cls, folder: str) -> Self:
+ return cls(TEMPLATE_ROOT.joinpath(folder))
diff --git a/src/docker_compose/domain/render/__init__.py b/src/docker_compose/domain/render/__init__.py
new file mode 100644
index 0000000..473a0f4
diff --git a/src/docker_compose/domain/render/bind_vols.py b/src/docker_compose/domain/render/bind_vols.py
new file mode 100644
index 0000000..555b2a5
--- /dev/null
+++ b/src/docker_compose/domain/render/bind_vols.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from collections.abc import Iterator
+from pathlib import Path
+from typing import TYPE_CHECKING, final
+
+from pydantic.dataclasses import dataclass
+
+from docker_compose import ROOT
+
+if TYPE_CHECKING:
+ from docker_compose.domain.render.render import Render
+
+
+@final
+@dataclass(frozen=True, slots=True)
+class BindVols:
+ # data_rep = Replace("data", str(DATA_ROOT))
+ render: Render
+
+ def __call__(self):
+ # def mk_bind_vols(self) -> None:
+ for path in self:
+ path.mkdir(parents=True, exist_ok=True)
+
+ def __iter__(self) -> Iterator[Path]:
+ for app in self.render.template.services:
+ for vol in app.volumes:
+ path = Path(self.render.org_data(vol.src))
+ if ROOT not in path.parents:
+ continue
+ if not path.is_dir():
+ continue
+ yield path
diff --git a/src/docker_compose/domain/render/render.py b/src/docker_compose/domain/render/render.py
new file mode 100644
index 0000000..c8e3ff5
--- /dev/null
+++ b/src/docker_compose/domain/render/render.py
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, final, override
+
+from pydantic import Field
+from pydantic.dataclasses import dataclass
+
+from docker_compose.domain.compose.compose import Compose
+from docker_compose.domain.render.bind_vols import BindVols
+
+if TYPE_CHECKING:
+ from docker_compose.domain.paths.org import OrgData
+
+
+@final
+@dataclass(slots=True)
+class Render:
+ org_data: OrgData
+ bind_vols: BindVols = Field(init=False)
+
+ def __post_init__(self):
+ self.bind_vols = BindVols(self)
+
+ # template: Compose = Field(init=False)
+ # org_data: dict[str, OrgData] = Field(init=False)
+
+ # def __post_init__(self, path: Path) -> None:
+ # self.src_paths = SrcPaths(path)
+ # self.template = Compose.from_path(self.src_paths.compose_file)
+ # self.org_data = {
+ # obj.org: obj for obj in OrgData.from_src_path(self.src_paths.cfg_file)
+ # }
+
+ @property
+ def template(self) -> Compose:
+ return self.org_data.src_paths.compose
+
+ @override
+ def __str__(self) -> str:
+ return self.org_data(str(self.template))
+
+ def __call__(self):
+ with self.org_data.dest.compose_path.open("wt") as f:
+ _ = f.write(str(self))
+
+ # @property
+ # def proxy_nets(self) -> Iterator[str]:
+ # for net in self.template.compose_data.networks:
+ # if not net.external:
+ # continue
+ # yield self.render(net.full_name)
+
+
+#
+# @final
+# @dataclass(frozen=True, slots=True)
+# class RenderByOrg:
+# template: Template
+# renders: dict[str, Render]
+#
+# def __iter__(self) -> Iterator[Render]:
+# yield from self.renders.values()
+# # yield render
+#
+# def __call__(self) -> None:
+# self.template()
+# for render in self:
+# render()
+# self.write_bind_vol_data()
+#
+# def write_bind_vol_data(self):
+# write_yaml(self.vols, self.template.dest_path.bind_vol_path)
+#
+# def __getitem__(self, key: str) -> Render:
+# return self.renders[key]
+#
+# def __bool__(self) -> bool:
+# return bool(self.renders)
+#
+# @staticmethod
+# def from_path_sub(template: Template, path: Path) -> Iterator[tuple[str, Render]]:
+# for org in OrgData.from_src_path(path):
+# yield org.org.dest, Render(template, org)
+#
+# @classmethod
+# def from_path(cls, path: Path) -> Self:
+# template = Template.from_src_path(path)
+# return cls(
+# template,
+# dict(cls.from_path_sub(template, path)),
+# )
+#
+# @property
+# def app(self):
+# return self.template.compose_data.name
+#
+# @property
+# def vols(self) -> Iterator[str]:
+# for render in self:
+# for path in render.bind_vols:
+# yield str(path)
+#
+# @property
+# def proxy_nets(self) -> Iterator[str]:
+# for render in self:
+# yield from render.proxy_nets
+#
+#
+# @final
+# @dataclass(frozen=True, slots=True)
+# class RenderByApp:
+# renders: dict[str, RenderByOrg]
+#
+# def __iter__(self) -> Iterator[RenderByOrg]:
+# yield from self.renders.values()
+#
+# def __call__(self) -> None:
+# for obj in self:
+# obj()
+#
+# @staticmethod
+# def _get_folders(path_: Path) -> Iterator[Path]:
+# for path in path_.iterdir():
+# if not path.is_dir():
+# continue
+# if path.stem == "traefik":
+# continue
+# yield path
+#
+# @classmethod
+# def _from_path_sub(cls, path_: Path) -> Iterator[tuple[str, RenderByOrg]]:
+# for path in cls._get_folders(path_):
+# by_org = RenderByOrg.from_path(path)
+# yield by_org.app, by_org
+#
+# @classmethod
+# def from_path(cls, path: Path) -> Self:
+# return cls(dict(cls._from_path_sub(path)))
+#
+# @classmethod
+# def load_all(cls) -> Self:
+# return cls.from_path(TEMPLATE_ROOT)
+#
+# @property
+# def proxy_nets(self) -> Iterator[str]:
+# for render in self:
+# yield from render.proxy_nets
diff --git a/src/docker_compose/env/data.py b/src/docker_compose/env/data.py
deleted file mode 100644
index 72111fd..0000000
--- a/src/docker_compose/env/data.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import re
-import secrets
-from collections.abc import Iterator
-from dataclasses import dataclass
-from functools import partial
-from pathlib import Path
-from typing import Self, final, override
-
-from docker_compose import log_cls
-from docker_compose.util.replace import Replace
-
-
-@final
-@dataclass
-class EnvData:
- line_valid = re.compile(r"^\s*(\w+)\s*=\s*(.+)\s*$")
- pswd = Replace.format_src("pswd", partial(secrets.token_urlsafe, 12))
- data: dict[str, str]
-
- @override
- def __str__(self) -> str:
- return "\n".join(sorted(map("=".join, self.with_pass)))
-
- @classmethod
- def get_lines(cls, path: Path) -> Iterator[tuple[str, str]]:
- with path.open(mode="rt") as f:
- for line in f:
- res = cls.line_valid.match(line)
- if not res:
- continue
- yield res.group(1), res.group(2)
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- log_cls(cls, path=str(path))
- return cls({k: v for k, v in cls.get_lines(path)})
-
- @property
- def with_pass(self) -> Iterator[tuple[str, str]]:
- p = self.pswd
- for k, v in self.data.items():
- if self.pswd.src not in v:
- yield k, v
- continue
- yield k, p(v)
diff --git a/src/docker_compose/env/main.py b/src/docker_compose/env/main.py
deleted file mode 100644
index fa055ab..0000000
--- a/src/docker_compose/env/main.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from collections.abc import Iterator
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Self, final
-
-from docker_compose.env.data import EnvData
-from docker_compose.org.data import OrgData
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class Env:
- env: EnvData
- org_data: OrgData
-
- def __call__(self):
- with self.org_data.dest.open("wt") as f:
- _ = f.write(str(self.env))
-
- @classmethod
- def from_path(cls, path: Path, org: OrgData) -> Self:
- return cls(
- EnvData.from_path(path),
- org,
- )
-
- @property
- def org(self) -> str:
- return self.org_data.org.dest
-
- @property
- def app(self) -> str:
- return self.org_data.app.dest
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class EnvByOrg:
- data: dict[str, Env]
- app: str
-
- def __call__(self):
- for obj in self:
- obj()
-
- def __iter__(self) -> Iterator[Env]:
- yield from self.data.values()
-
- @classmethod
- def _from_path_sub(cls, path: Path) -> Iterator[tuple[str, Env]]:
- env_data = EnvData.from_path(path)
- for org in OrgData.from_path(path):
- env = Env(env_data, org)
- yield env.org, env
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- return cls(
- dict(cls._from_path_sub(path)),
- path.stem,
- )
-
- #
- # @property
- # def app(self) -> str:
- # return self.env.org_data.app.dest
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class EnvByApp:
- data: dict[str, EnvByOrg]
-
- def __iter__(self) -> Iterator[EnvByOrg]:
- yield from self.data.values()
-
- def __call__(self) -> None:
- for obj in self:
- obj()
-
- @staticmethod
- def _get_folders(path_: Path) -> Iterator[Path]:
- for path in path_.iterdir():
- if not path.is_dir():
- continue
- if path.stem == "traefik":
- continue
- yield path
-
- @classmethod
- def _from_path_sub(cls, path_: Path) -> Iterator[tuple[str, EnvByOrg]]:
- for path in cls._get_folders(path_):
- by_org = EnvByOrg.from_path(path)
- yield by_org.app, by_org
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- return cls(dict(cls._from_path_sub(path)))
diff --git a/src/docker_compose/org/data.py b/src/docker_compose/org/data.py
deleted file mode 100644
index aa802f5..0000000
--- a/src/docker_compose/org/data.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from collections.abc import Iterator
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Callable, Self, cast, final
-
-from docker_compose import APP_ROOT, log_cls
-from docker_compose.org.org_yaml import OrgDataYaml, OrgYaml
-from docker_compose.util.replace import Replace
-from docker_compose.util.yaml_util import read_yaml
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class OrgData:
- app: Replace
- org: Replace
- url: Replace
- dest: Path
-
- def __post_init__(self):
- log_cls(
- self,
- app=self.app.dest,
- org=self.org.dest,
- url=self.url.dest,
- )
-
- @classmethod
- def from_dict(cls, app: str, org: str, data: OrgDataYaml) -> Self:
- url = data.get("url")
- return cls(
- Replace.format_src("name", app),
- Replace.format_src("org", org),
- Replace.format_src(
- "url", ".".join((url, "ccamper7", "net")) if url else None
- ),
- APP_ROOT.joinpath(org, app),
- )
-
- # @classmethod
- # def get_app(cls, path: Path) -> str:
- # return path.stem
-
- @classmethod
- def from_path(cls, path: Path) -> Iterator[Self]:
- log_cls(cls, path=str(path))
- app = path.stem
- for org, data in cast(OrgYaml, cast(object, read_yaml(path))).items():
- yield cls.from_dict(app, org, data)
-
- def __iter__(self) -> Iterator[Callable[[str], str]]:
- yield self.app
- yield self.org
- yield self.url
diff --git a/src/docker_compose/org/org_yaml.py b/src/docker_compose/org/org_yaml.py
deleted file mode 100644
index 3634301..0000000
--- a/src/docker_compose/org/org_yaml.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from typing import Literal, NotRequired, TypedDict
-
-
-class OrgDataYaml(TypedDict):
- # org: str
- url: NotRequired[str]
-
-
-type OrgYaml = dict[Literal["ccamper7", "c4", "stryten"], OrgDataYaml]
diff --git a/src/docker_compose/render/main.py b/src/docker_compose/render/main.py
deleted file mode 100644
index 402393c..0000000
--- a/src/docker_compose/render/main.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from collections.abc import Iterator
-from dataclasses import dataclass
-from itertools import chain
-from pathlib import Path
-from typing import Self, final, override
-
-from docker_compose import APP_ROOT, ROOT, TEMPLATE_ROOT
-from docker_compose.compose_data.main import Template
-from docker_compose.org.data import OrgData
-from docker_compose.util.replace import Replace
-from docker_compose.util.yaml_util import write_yaml
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class BindVols:
- # data_rep = Replace("data", str(DATA_ROOT))
- render: "Render"
-
- def __call__(self):
- # def mk_bind_vols(self) -> None:
- for path in self:
- path.mkdir(parents=True, exist_ok=True)
-
- def __iter__(self) -> Iterator[Path]:
- root = str(ROOT)
- for app in self.render.template.compose_data.services.values():
- for vol in app.volumes:
- path = self.render.render(vol.split(":", 1)[0])
- if not path.startswith(root):
- continue
- path = Path(path)
- if not path.is_dir():
- continue
- yield path
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class Render:
- data_rep = Replace("data", str(APP_ROOT))
- template: Template
- org_data: OrgData
-
- @override
- def __str__(self) -> str:
- return self.render(str(self.template))
-
- def __call__(self):
- self.write(str(self))
-
- @property
- def bind_vols(self) -> BindVols:
- return BindVols(self)
-
- def render(self, txt: str) -> str:
- for func in chain((self.data_rep,), self.org_data):
- txt = func(txt)
- return txt
-
- @property
- def proxy_nets(self) -> Iterator[str]:
- for net in self.template.compose_data.networks:
- if not net.external:
- continue
- yield self.render(net.full_name)
-
- def write(self, data: str, render:bool=False):
- with self.org_data.dest.open("wt") as f:
- _ = f.write(self.render(data) if render else data)
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class RenderByOrg:
- template: Template
- renders: dict[str, Render]
-
- def __iter__(self) -> Iterator[Render]:
- yield from self.renders.values()
- # yield render
-
- def __call__(self) -> None:
- self.template()
- for render in self:
- render()
- self.write_bind_vol_data()
-
- def write_bind_vol_data(self):
- write_yaml(self.vols, self.template.dest_path.bind_vol_path)
-
- def __getitem__(self, key: str) -> Render:
- return self.renders[key]
-
- def __bool__(self) -> bool:
- return bool(self.renders)
-
- @staticmethod
- def from_path_sub(template: Template, path: Path) -> Iterator[tuple[str, Render]]:
- for org in OrgData.from_path(path):
- yield org.org.dest, Render(template, org)
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- template = Template.from_path(path)
- return cls(
- template,
- dict(cls.from_path_sub(template, path)),
- )
-
- @property
- def app(self):
- return self.template.compose_data.name
-
- @property
- def vols(self) -> Iterator[str]:
- for render in self:
- for path in render.bind_vols:
- yield str(path)
-
- @property
- def proxy_nets(self) -> Iterator[str]:
- for render in self:
- yield from render.proxy_nets
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class RenderByApp:
- renders: dict[str, RenderByOrg]
-
- def __iter__(self) -> Iterator[RenderByOrg]:
- yield from self.renders.values()
-
- def __call__(self) -> None:
- for obj in self:
- obj()
-
- @staticmethod
- def _get_folders(path_: Path) -> Iterator[Path]:
- for path in path_.iterdir():
- if not path.is_dir():
- continue
- if path.stem == "traefik":
- continue
- yield path
-
- @classmethod
- def _from_path_sub(cls, path_: Path) -> Iterator[tuple[str, RenderByOrg]]:
- for path in cls._get_folders(path_):
- by_org = RenderByOrg.from_path(path)
- yield by_org.app, by_org
-
- @classmethod
- def from_path(cls, path: Path) -> Self:
- return cls(dict(cls._from_path_sub(path)))
-
- @classmethod
- def load_all(cls) -> Self:
- return cls.from_path(TEMPLATE_ROOT)
-
- @property
- def proxy_nets(self) -> Iterator[str]:
- for render in self:
- yield from render.proxy_nets
diff --git a/src/docker_compose/util/Ts.py b/src/docker_compose/util/Ts.py
deleted file mode 100644
index ca01524..0000000
--- a/src/docker_compose/util/Ts.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from collections.abc import ItemsView, Iterator, KeysView, MutableMapping, Sequence, Set
-from types import GenericAlias, UnionType
-from typing import (
- ClassVar,
- Protocol,
- TypeAliasType,
- cast,
- get_args,
- get_origin,
-)
-
-#generic nested data
-type T_Primitive = None | bool | int | str
-type _PrimIters = Sequence[TypePrim] | Set[TypePrim] | Iterator[TypePrim]
-type TypePrimDict = MutableMapping[T_Primitive, TypePrim]
-type TypePrim = T_Primitive | _PrimIters | TypePrimDict
-
-# type T_TDict = MutableMapping[T_Primitive, T_Prim]
-
-# data going to and from YAML
-type TypeYaml = T_Primitive | TypeYamlRes
-type TypeYamlRes = list[TypeYaml] | TypeYamlDict
-class TypeYamlDict(Protocol):
- def __getitem__(self, key: str, /) -> object: ...
- # def __setitem__(self, key: str, value: V, /) -> V: ...
- # def __delitem__(self, key: Never | K, /) -> None: ...
- def __contains__(self, key: str, /) -> bool: ...
- def __iter__(self) -> Iterator[str]: ...
- def __len__(self) -> int: ...
- def keys(self) -> KeysView[str]: ...
- def items(self) -> ItemsView[str, object]: ...
- # def pop(self, key: Never | K, /) -> V: ...
- # def popitem(self) -> tuple[K, V]: ...
- # def clear(self) -> None: ...
-
- __required_keys__: ClassVar[frozenset[str]]
- __optional_keys__: ClassVar[frozenset[str]]
-
-
-#yaml compatible data
-type TypeYamlCompatibleIters = Sequence[TypeYamlCompatible] | Set[TypeYamlCompatible] | Iterator[TypeYamlCompatible]
-type TypeYamlCompatibleDict = MutableMapping[str, TypeYamlCompatible]
-type TypeYamlCompatibleRes = TypeYamlCompatibleIters | TypeYamlCompatibleDict
-type TypeYamlCompatible = T_Primitive | TypeYamlCompatibleRes
-
-
-# type T_YamlPostDict = MutableMapping[str, T_YamlPost]
-# type T_YamlPostRes = Sequence[T_YamlPost] | T_YamlPostDict
-# type T_YamlPost = T_Primitive | T_YamlPostRes
-
-
-def get_union_types(annotations: UnionType) -> Iterator[type]:
- for annotation in get_args(annotations): # pyright: ignore[reportAny]
- annotation = cast(TypeAliasType | GenericAlias | UnionType | type, annotation)
- if isinstance(annotation, TypeAliasType):
- yield from get_types(
- cast(GenericAlias | UnionType | type, annotation.__value__)
- )
- continue
- if isinstance(annotation, UnionType):
- yield from get_union_types(annotation)
- continue
- yield from get_types(annotation)
-
-
-def get_types(
- annotation: TypeAliasType | GenericAlias | UnionType | type,
-) -> Iterator[type]:
- if isinstance(annotation, TypeAliasType):
- yield from get_types(
- cast(GenericAlias | UnionType | type, annotation.__value__)
- )
- return
- if isinstance(annotation, GenericAlias):
- # print(annotation)
- # print(get_origin(annotation))
- yield get_origin(annotation)
- return
- if isinstance(annotation, UnionType):
- yield from get_union_types(annotation)
- return
- yield annotation
- return
diff --git a/src/docker_compose/util/__init__.py b/src/docker_compose/util/__init__.py
index fbbdd3b..198a76a 100644
--- a/src/docker_compose/util/__init__.py
+++ b/src/docker_compose/util/__init__.py
@@ -1,29 +1,38 @@
-# from collections.abc import Iterator, Mapping
-# from typing import Any, cast
-#
-# from docker_compose.util.Ts import T_PrimDict, T_Primitive, T_PrimVal
-#
-#
-# def merge_dicts[T: Mapping[Any, Any]](dict1: T, dict2: T) -> T:
-# def _merge_dicts(
-# _dict1: T_PrimDict, _dict2: T_PrimDict
-# ) -> Iterator[tuple[T_Primitive, T_PrimVal]]:
-# s1 = frozenset(_dict1.keys())
-# s2 = frozenset(_dict2.keys())
-# for k in s1.difference(s2):
-# yield k, _dict1[k]
-# for k in s2.difference(s1):
-# yield k, _dict2[k]
-# for k in s1.intersection(s2):
-# v1 = _dict1[k]
-# v2 = _dict2[k]
-# if isinstance(v1, dict) and isinstance(v2, dict):
-# yield k, dict[T_Primitive, T_PrimVal](_merge_dicts(v1, v2))
-# continue
-# if isinstance(v1, list) and isinstance(v2, list):
-# yield k, list(frozenset(v1).union(v2))
-# continue
-# raise Exception("merge error")
-#
-# return cast(T, dict(_merge_dicts(dict1, dict2)))
-#
+import re
+from functools import partial, reduce
+from typing import Any, final, override
+
+import yaml
+from pydantic.dataclasses import dataclass
+
+
+@final
+@dataclass
+class ReplaceStr:
+ src: str
+ repl: str
+
+ def __call__(self, s: str) -> str:
+ return s.replace(self.src, self.repl)
+
+
+@final
+class YamlUtil:
+ indent = partial(re.compile(r"(^\s?-)", re.MULTILINE).sub, r" \g<1>")
+ port = partial(re.compile(r"(\W*?)(\d+:\d+)", re.MULTILINE).sub, r'\g<1>"\g<2>"')
+
+ class VerboseSafeDumper(yaml.SafeDumper):
+ @override
+ def ignore_aliases(self, data: object) -> bool:
+ return True
+
+ def __call__(self, data: dict[Any, Any]) -> str:
+ return reduce(
+ lambda s, f: f(s),
+ self,
+ yaml.dump(data, Dumper=self.VerboseSafeDumper),
+ )
+
+ def __iter__(self):
+ yield self.indent
+ yield self.port
diff --git a/src/docker_compose/util/replace.py b/src/docker_compose/util/replace.py
deleted file mode 100644
index 74b2a28..0000000
--- a/src/docker_compose/util/replace.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from collections.abc import Callable
-from dataclasses import dataclass
-from typing import Self, final
-
-type TypeDest = str | None | Callable[[], str]
-
-
-@final
-@dataclass(frozen=True, slots=True)
-class Replace:
- src: str
- _dest: TypeDest
-
- def __call__(self, string: str) -> str:
- return string.replace(self.src, self.dest)
-
- @classmethod
- def format_src(cls, src: str, dest: TypeDest):
- return cls(cls.fmt(src), dest)
-
- @classmethod
- def format_src_dest(cls, src: str, dest: str):
- return cls(cls.fmt(src), cls.fmt(dest))
-
- @classmethod
- def from_str(cls, src: str) -> Self:
- return cls.format_src(src, src)
-
- @classmethod
- def build_placeholder(cls, src: str, *dest: str) -> Self:
- return cls.format_src(
- src,
- "_".join(map(cls.fmt, dest)),
- )
-
- @property
- def dest(self) -> str:
- if not self._dest:
- return ""
- if isinstance(self._dest, str):
- return self._dest
- return self._dest()
-
- @staticmethod
- def fmt(src: str) -> str:
- return f"${{_{src.upper()}}}"
-
-
-#
-# @final
-# @dataclass(frozen=True, slots=True)
-# class ReplaceDynamic:
-# val: str
-# fmt: str
-#
-# @classmethod
-# def factory(cls, val: str):
-# return cls(val, format_src(val))
-#
-# def __call__(self, string: str) -> str:
-# return string.replace(self.fmt, self.val)
-#
-# # def __str__(self) -> str:
-# # return self.val if isinstance(self.val, str) else self.val.fmt
-# # def build_placeholder(self, *args: "ReplaceDynamic"):
-# # data = ((rep.val.upper(), rep.fmt) for rep in chain((self,), args))
-# # src: tuple[str, ...]
-# # dest: tuple[str, ...]
-# # src, dest = zip(*data)
-# # return ReplaceUnique("_".join(src), "_".join(dest))
-#
-#
-# @dataclass(frozen=True, slots=True)
-# class ReplaceStatic:
-# src: ClassVar[ReplaceDynamic]
-# _dest: None | str | Callable[[], str]
-#
-# def replace(self, string: str) -> str:
-# return string.replace(self.src.fmt, self.dest)
-#
-# @property
-# def dest(self) -> str:
-# if not self._dest:
-# return ""
-# if isinstance(self._dest, str):
-# return self._dest
-# return self._dest()
-#
-# # @classmethod
-# # def two_stage(cls, dest: str) -> tuple[Self, ReplaceDynamic]:
-# # dest_var = ReplaceDynamic(dest)
-# # return cls(dest_var.fmt), dest_var
diff --git a/src/docker_compose/util/yaml_util.py b/src/docker_compose/util/yaml_util.py
deleted file mode 100644
index a34685b..0000000
--- a/src/docker_compose/util/yaml_util.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import re
-from collections.abc import Iterator, MutableMapping, Set
-from pathlib import Path
-from typing import (
- cast,
- get_type_hints,
- is_typeddict,
- override,
-)
-
-import yaml
-
-from docker_compose.util.Ts import (
- TypeYamlCompatible,
- TypeYamlCompatibleDict,
- TypeYamlCompatibleIters,
- TypeYamlCompatibleRes,
- TypeYamlDict,
- TypeYamlRes,
- get_types,
-)
-
-# class TypedYamlDict[K: object, V: object](Protocol):
-# def __getitem__(self, key: K, /) -> V: ...
-# # def __setitem__(self, key: K, value: V, /) -> V: ...
-# def __delitem__(self, key: K, /) -> V: ...
-# def __contains__(self, key: K, /) -> bool: ...
-# def __iter__(self) -> Iterator[K]: ...
-# def __len__(self) -> int: ...
-# def keys(self) -> KeysView[K]: ...
-# def items(self) -> ItemsView[K, V]: ...
-# def pop(self, key: K, /) -> V: ...
-#
-# # def popitem(self) -> tuple[K, V]: ...
-#
-# # def clear(self) -> None: ...
-#
-# __required_keys__: ClassVar[frozenset[str]]
-# __optional_keys__: ClassVar[frozenset[str]]
-
-
-class VerboseSafeDumper(yaml.SafeDumper):
- @override
- def ignore_aliases(self, data: object) -> bool:
- return True
-
-
-def yaml_prep(data: TypeYamlCompatibleRes) -> TypeYamlCompatibleRes:
- if isinstance(data, MutableMapping):
- return dict_prep(data)
- if isinstance(data, tuple):
- return tuple(list_prep(data))
- res = tuple(list_prep(data))
- try:
- return tuple(sorted(res)) # pyright: ignore[reportArgumentType, reportUnknownArgumentType, reportUnknownVariableType]
- except TypeError:
- return res
-
-
-def list_prep(data: TypeYamlCompatibleIters) -> Iterator[TypeYamlCompatible]:
- for v in data:
- if isinstance(v, (MutableMapping, tuple, list, Set, Iterator)):
- yield yaml_prep(v)
- continue
- if v:
- yield v
- continue
- if isinstance(v, bool):
- yield v
- continue
-
-
-def dict_prep(data: TypeYamlCompatibleDict) -> TypeYamlCompatibleDict:
- keys = tuple(data.keys())
- for k in keys:
- v = data[k]
- if isinstance(v, (MutableMapping, tuple, list, Set, Iterator)):
- data[k] = v = yaml_prep(v)
-
- if v:
- continue
- if isinstance(v, bool):
- continue
- del data[k]
- return data
-
-
-def to_yaml(data: TypeYamlCompatibleRes) -> str:
- dict_ = yaml_prep(data)
- res = yaml.dump(dict_, Dumper=VerboseSafeDumper)
- res = re.sub(r"(^\s?-)", r" \g<1>", res, flags=re.MULTILINE)
- return re.sub(r"(\W*?)(\d+:\d+)", r'\g<1>"\g<2>"', res, flags=re.MULTILINE)
-
-
-def write_yaml(
- data: TypeYamlCompatibleRes,
- path: Path,
-) -> None:
- with path.open("wt") as f:
- _ = f.write(to_yaml(data))
-
-
-def read_yaml(path: Path) -> TypeYamlRes:
- with path.open("rt") as f:
- return yaml.safe_load(f) # pyright: ignore[reportAny]
-
-
-def read_typed_yaml[T: TypeYamlDict](
- type_: type[T],
- path: Path,
-) -> T:
- with path.open("rt") as f:
- data: T = yaml.safe_load(f) # pyright: ignore[reportAny]
- return path_to_typed(type_, data, path)
-
-
-def path_to_typed[T: TypeYamlDict](
- type_: type[T],
- data: T,
- path: Path,
-) -> T:
- try:
- return validate_typed_dict(type_, data)
- except (KeyError, TypeError) as e:
- e.add_note(f"path: {path!s}")
- raise e
-
-
-def validate_typed_dict[T: TypeYamlDict](
- t: type[T],
- data: T,
-) -> T:
- _validate_typed_dict(t, data)
- return cast(T, cast(object, data))
-
-
-def _validate_typed_dict[T: TypeYamlDict](
- t: type[T],
- data: T,
-) -> None:
- keys = frozenset(data.keys())
- missing = t.__required_keys__.difference(keys)
- if missing:
- raise KeyError(f"missing required key(s): {', '.join(missing)}")
- extra = keys.difference(t.__required_keys__, t.__optional_keys__)
- if extra:
- raise KeyError(f"extra key(s): {', '.join(map(str, extra))}")
- hints = get_type_hints(t)
- for key, val in data.items():
- t2 = cast(type, cast(object, hints[key]))
- if is_typeddict(t2):
- _validate_typed_dict(t2, cast(TypeYamlDict, val))
- continue
-
- # try:
- # print(t2)
- # print(get_types(t2))
- t2 = tuple(get_types(t2))
- if not isinstance(val, t2):
- msg = ", ".join(t.__name__ for t in t2)
- e = TypeError(f"key: {key} expected *{msg}*, got *{type(val).__name__}*")
- e.add_note(f"key: {key!s}")
- raise e
- # valid = isinstance(val, get_types(t2))
- # except TypeError:
- # valid = isinstance(val, get_origin(t2))
- # if not valid:
- # raise TypeError(
- # f"key: {key} expected *{type(t2).__name__}*, got *{type(val).__name__}*"
- # )
- # yield key, val
diff --git a/uv.lock b/uv.lock
index 7457119..9ba3170 100755
--- a/uv.lock
+++ b/uv.lock
@@ -2,6 +2,15 @@ version = 1
revision = 3
requires-python = ">=3.13"
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
[[package]]
name = "basedpyright"
version = "1.37.1"
@@ -30,16 +39,48 @@ source = { editable = "." }
dependencies = [
{ name = "basedpyright" },
{ name = "loguru" },
+ { name = "pydantic" },
{ name = "pyyaml" },
{ name = "ruff" },
+ { name = "sqlalchemy" },
]
[package.metadata]
requires-dist = [
{ name = "basedpyright", specifier = ">=1.37.1" },
{ name = "loguru", specifier = ">=0.7.3" },
+ { name = "pydantic", specifier = ">=2.12.5" },
{ name = "pyyaml", specifier = ">=6.0.3" },
- { name = "ruff", specifier = ">=0.14.11" },
+ { name = "ruff", specifier = "==0.14.13" },
+ { name = "sqlalchemy", specifier = ">=2.0.45" },
+]
+
+[[package]]
+name = "greenlet"
+version = "3.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" },
+ { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" },
+ { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" },
+ { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" },
+ { url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" },
]
[[package]]
@@ -71,6 +112,74 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/df/af/cd3290a647df567645353feed451ef4feaf5844496ced69c4dcb84295ff4/nodejs_wheel_binaries-24.12.0-py2.py3-none-win_arm64.whl", hash = "sha256:d0c2273b667dd7e3f55e369c0085957b702144b1b04bfceb7ce2411e58333757", size = 39048104, upload-time = "2025-12-11T21:12:23.495Z" },
]
+[[package]]
+name = "pydantic"
+version = "2.12.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+]
+
[[package]]
name = "pyyaml"
version = "6.0.3"
@@ -109,28 +218,78 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.14.11"
+version = "0.14.13"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/50/0a/1914efb7903174b381ee2ffeebb4253e729de57f114e63595114c8ca451f/ruff-0.14.13.tar.gz", hash = "sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47", size = 6059504, upload-time = "2026-01-15T20:15:16.918Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" },
- { url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" },
- { url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" },
- { url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" },
- { url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" },
- { url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" },
- { url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" },
- { url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = "2026-01-08T19:11:40.069Z" },
- { url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" },
- { url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" },
- { url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" },
- { url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" },
- { url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" },
- { url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" },
- { url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" },
- { url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" },
- { url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" },
- { url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/ae/0deefbc65ca74b0ab1fd3917f94dc3b398233346a74b8bbb0a916a1a6bf6/ruff-0.14.13-py3-none-linux_armv6l.whl", hash = "sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b", size = 13062418, upload-time = "2026-01-15T20:14:50.779Z" },
+ { url = "https://files.pythonhosted.org/packages/47/df/5916604faa530a97a3c154c62a81cb6b735c0cb05d1e26d5ad0f0c8ac48a/ruff-0.14.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed", size = 13442344, upload-time = "2026-01-15T20:15:07.94Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/f3/e0e694dd69163c3a1671e102aa574a50357536f18a33375050334d5cd517/ruff-0.14.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063", size = 12354720, upload-time = "2026-01-15T20:15:09.854Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/e8/67f5fcbbaee25e8fc3b56cc33e9892eca7ffe09f773c8e5907757a7e3bdb/ruff-0.14.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e", size = 12774493, upload-time = "2026-01-15T20:15:20.908Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/ce/d2e9cb510870b52a9565d885c0d7668cc050e30fa2c8ac3fb1fda15c083d/ruff-0.14.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09", size = 12815174, upload-time = "2026-01-15T20:15:05.74Z" },
+ { url = "https://files.pythonhosted.org/packages/88/00/c38e5da58beebcf4fa32d0ddd993b63dfacefd02ab7922614231330845bf/ruff-0.14.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9", size = 13680909, upload-time = "2026-01-15T20:15:14.537Z" },
+ { url = "https://files.pythonhosted.org/packages/61/61/cd37c9dd5bd0a3099ba79b2a5899ad417d8f3b04038810b0501a80814fd7/ruff-0.14.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032", size = 15144215, upload-time = "2026-01-15T20:15:22.886Z" },
+ { url = "https://files.pythonhosted.org/packages/56/8a/85502d7edbf98c2df7b8876f316c0157359165e16cdf98507c65c8d07d3d/ruff-0.14.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c", size = 14706067, upload-time = "2026-01-15T20:14:48.271Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/2f/de0df127feb2ee8c1e54354dc1179b4a23798f0866019528c938ba439aca/ruff-0.14.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427", size = 14133916, upload-time = "2026-01-15T20:14:57.357Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/77/9b99686bb9fe07a757c82f6f95e555c7a47801a9305576a9c67e0a31d280/ruff-0.14.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841", size = 13859207, upload-time = "2026-01-15T20:14:55.111Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/46/2bdcb34a87a179a4d23022d818c1c236cb40e477faf0d7c9afb6813e5876/ruff-0.14.13-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c", size = 14043686, upload-time = "2026-01-15T20:14:52.841Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/a9/5c6a4f56a0512c691cf143371bcf60505ed0f0860f24a85da8bd123b2bf1/ruff-0.14.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b", size = 12663837, upload-time = "2026-01-15T20:15:18.921Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/bb/b920016ece7651fa7fcd335d9d199306665486694d4361547ccb19394c44/ruff-0.14.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae", size = 12805867, upload-time = "2026-01-15T20:14:59.272Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/b3/0bd909851e5696cd21e32a8fc25727e5f58f1934b3596975503e6e85415c/ruff-0.14.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e", size = 13208528, upload-time = "2026-01-15T20:15:03.732Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/3b/e2d94cb613f6bbd5155a75cbe072813756363eba46a3f2177a1fcd0cd670/ruff-0.14.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c", size = 13929242, upload-time = "2026-01-15T20:15:11.918Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/c5/abd840d4132fd51a12f594934af5eba1d5d27298a6f5b5d6c3be45301caf/ruff-0.14.13-py3-none-win32.whl", hash = "sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680", size = 12919024, upload-time = "2026-01-15T20:14:43.647Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/55/6384b0b8ce731b6e2ade2b5449bf07c0e4c31e8a2e68ea65b3bafadcecc5/ruff-0.14.13-py3-none-win_amd64.whl", hash = "sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef", size = 14097887, upload-time = "2026-01-15T20:15:01.48Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/e1/7348090988095e4e39560cfc2f7555b1b2a7357deba19167b600fdf5215d/ruff-0.14.13-py3-none-win_arm64.whl", hash = "sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247", size = 13080224, upload-time = "2026-01-15T20:14:45.853Z" },
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.45"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size = 9869912, upload-time = "2025-12-09T21:05:16.737Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/c8/7cc5221b47a54edc72a0140a1efa56e0a2730eefa4058d7ed0b4c4357ff8/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf", size = 3277082, upload-time = "2025-12-09T22:11:06.167Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/50/80a8d080ac7d3d321e5e5d420c9a522b0aa770ec7013ea91f9a8b7d36e4a/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e", size = 3293131, upload-time = "2025-12-09T22:13:52.626Z" },
+ { url = "https://files.pythonhosted.org/packages/da/4c/13dab31266fc9904f7609a5dc308a2432a066141d65b857760c3bef97e69/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b", size = 3225389, upload-time = "2025-12-09T22:11:08.093Z" },
+ { url = "https://files.pythonhosted.org/packages/74/04/891b5c2e9f83589de202e7abaf24cd4e4fa59e1837d64d528829ad6cc107/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8", size = 3266054, upload-time = "2025-12-09T22:13:54.262Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/24/fc59e7f71b0948cdd4cff7a286210e86b0443ef1d18a23b0d83b87e4b1f7/sqlalchemy-2.0.45-cp313-cp313-win32.whl", hash = "sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a", size = 2110299, upload-time = "2025-12-09T21:39:33.486Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/c5/d17113020b2d43073412aeca09b60d2009442420372123b8d49cc253f8b8/sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl", hash = "sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee", size = 2136264, upload-time = "2025-12-09T21:39:36.801Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/8d/bb40a5d10e7a5f2195f235c0b2f2c79b0bf6e8f00c0c223130a4fbd2db09/sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6", size = 3521998, upload-time = "2025-12-09T22:13:28.622Z" },
+ { url = "https://files.pythonhosted.org/packages/75/a5/346128b0464886f036c039ea287b7332a410aa2d3fb0bb5d404cb8861635/sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a", size = 3473434, upload-time = "2025-12-09T22:13:30.188Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/64/4e1913772646b060b025d3fc52ce91a58967fe58957df32b455de5a12b4f/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774", size = 3272404, upload-time = "2025-12-09T22:11:09.662Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/27/caf606ee924282fe4747ee4fd454b335a72a6e018f97eab5ff7f28199e16/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce", size = 3277057, upload-time = "2025-12-09T22:13:56.213Z" },
+ { url = "https://files.pythonhosted.org/packages/85/d0/3d64218c9724e91f3d1574d12eb7ff8f19f937643815d8daf792046d88ab/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33", size = 3222279, upload-time = "2025-12-09T22:11:11.1Z" },
+ { url = "https://files.pythonhosted.org/packages/24/10/dd7688a81c5bc7690c2a3764d55a238c524cd1a5a19487928844cb247695/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74", size = 3244508, upload-time = "2025-12-09T22:13:57.932Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/41/db75756ca49f777e029968d9c9fee338c7907c563267740c6d310a8e3f60/sqlalchemy-2.0.45-cp314-cp314-win32.whl", hash = "sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f", size = 2113204, upload-time = "2025-12-09T21:39:38.365Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a2/0e1590e9adb292b1d576dbcf67ff7df8cf55e56e78d2c927686d01080f4b/sqlalchemy-2.0.45-cp314-cp314-win_amd64.whl", hash = "sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177", size = 2138785, upload-time = "2025-12-09T21:39:39.503Z" },
+ { url = "https://files.pythonhosted.org/packages/42/39/f05f0ed54d451156bbed0e23eb0516bcad7cbb9f18b3bf219c786371b3f0/sqlalchemy-2.0.45-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b", size = 3522029, upload-time = "2025-12-09T22:13:32.09Z" },
+ { url = "https://files.pythonhosted.org/packages/54/0f/d15398b98b65c2bce288d5ee3f7d0a81f77ab89d9456994d5c7cc8b2a9db/sqlalchemy-2.0.45-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b", size = 3475142, upload-time = "2025-12-09T22:13:33.739Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size = 1936672, upload-time = "2025-12-09T21:54:52.608Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]