Merge remote-tracking branch 'origin/main'
# Conflicts: # .idea/.gitignore # .idea/dictionaries/project.xml # .idea/inspectionProfiles/profiles_settings.xml # .idea/misc.xml # .idea/modules.xml # src/docker_compose/__init__.py # src/docker_compose/cfg/__init__.py # src/docker_compose/cfg/cfg_paths.py # src/docker_compose/cfg/src_path.py # src/docker_compose/compose/compose.py # src/docker_compose/compose/compose_yaml.py # src/docker_compose/compose/net.py # src/docker_compose/compose/net_yaml.py
This commit is contained in:
11
.idea/compose_gen_uv.iml
generated
Normal file
11
.idea/compose_gen_uv.iml
generated
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<module type="PYTHON_MODULE" version="4">
|
||||||
|
<component name="NewModuleRootManager">
|
||||||
|
<content url="file://$MODULE_DIR$">
|
||||||
|
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||||
|
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
||||||
|
</content>
|
||||||
|
<orderEntry type="jdk" jdkName="Python 3.13 (compose_gen_uv)" jdkType="Python SDK" />
|
||||||
|
<orderEntry type="sourceFolder" forTests="false" />
|
||||||
|
</component>
|
||||||
|
</module>
|
||||||
10
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
10
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<profile version="1.0">
|
||||||
|
<option name="myName" value="Project Default" />
|
||||||
|
<inspection_tool class="PyInconsistentReturnsInspection" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
|
||||||
|
<inspection_tool class="PyMissingTypeHintsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
||||||
|
<option name="m_onlyWhenTypesAreKnown" value="false" />
|
||||||
|
</inspection_tool>
|
||||||
|
<inspection_tool class="PyUnnecessaryCastInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true" />
|
||||||
|
</profile>
|
||||||
|
</component>
|
||||||
9
.idea/ruff.xml
generated
Normal file
9
.idea/ruff.xml
generated
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="RuffConfigService">
|
||||||
|
<option name="globalRuffExecutablePath" value="/opt/pycharm_venv/bin/ruff" />
|
||||||
|
<option name="runRuffOnSave" value="true" />
|
||||||
|
<option name="useRuffImportOptimizer" value="true" />
|
||||||
|
<option name="useRuffServer" value="true" />
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
8
src/docker_compose/Ts.py
Normal file
8
src/docker_compose/Ts.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from collections.abc import Mapping
|
||||||
|
|
||||||
|
# Recursive alias: arbitrarily nested lists of strings.
type nested_list = list[str | nested_list]

# Scalar leaf values that may appear in YAML documents.
type T_Primitive = bool | int | str

# A value in a "primitive" mapping: scalar, list of scalars, or nested mapping.
type T_PrimVal = T_Primitive | list[T_Primitive] | T_PrimDict

# Mapping with scalar keys; mutually recursive with T_PrimVal.
type T_PrimDict = Mapping[T_Primitive, T_PrimVal]

# Any value position in a parsed YAML document (lists may mix scalars and maps).
type T_YamlVals = T_Primitive | list[T_Primitive | T_YamlDict] | T_YamlDict

# A parsed YAML mapping: string keys to YAML values.
type T_YamlDict = Mapping[str, T_YamlVals]
|
||||||
27
src/docker_compose/cfg/cfg_paths_yaml.py
Normal file
27
src/docker_compose/cfg/cfg_paths_yaml.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from collections.abc import Iterator
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import NotRequired, Self, TypedDict, final
|
||||||
|
|
||||||
|
from docker_compose.cfg.org_data import OrgData
|
||||||
|
from docker_compose.cfg.org_data_yaml import OrgDataYaml
|
||||||
|
from docker_compose.cfg.src_path import SrcPaths
|
||||||
|
from docker_compose.yaml import YamlWrapper
|
||||||
|
|
||||||
|
|
||||||
|
class CfgYamlData(TypedDict):
    """Raw shape of the top-level configuration YAML document.

    ``services`` and ``orgs`` are required; ``volumes`` may be absent.
    """

    # Service names/paths to include in the generated compose output.
    services: list[str]
    # Optional extra volume definitions.
    volumes: NotRequired[list[str]]
    # One entry per organisation the config is rendered for.
    orgs: list[OrgDataYaml]
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class CfgYaml(YamlWrapper[CfgYamlData]):
    """Typed wrapper around the loaded top-level config YAML."""

    @classmethod
    def from_src_paths(cls, src_paths: SrcPaths) -> Self:
        """Load the config file referenced by *src_paths*."""
        return cls.from_path(src_paths.cfg_file)

    @property
    def orgs_data(self) -> Iterator[OrgData]:
        """Yield one parsed :class:`OrgData` per raw ``orgs`` entry."""
        yield from (OrgData.from_dict(raw) for raw in self.data["orgs"])
|
||||||
15
src/docker_compose/cfg/org_data.py
Normal file
15
src/docker_compose/cfg/org_data.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Self, final
|
||||||
|
|
||||||
|
from docker_compose.cfg.org_data_yaml import OrgDataYaml
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class OrgData:
    """Validated organisation entry taken from the config YAML."""

    # Organisation identifier.
    org: str
    # Optional subdomain/URL fragment; None when the entry has no "url" key.
    url: str | None

    @classmethod
    def from_dict(cls, data: OrgDataYaml) -> Self:
        """Build from the raw mapping; ``url`` is optional in the source."""
        url = data.get("url")
        return cls(data["org"], url)
|
||||||
6
src/docker_compose/cfg/org_data_yaml.py
Normal file
6
src/docker_compose/cfg/org_data_yaml.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from typing import NotRequired, TypedDict
|
||||||
|
|
||||||
|
|
||||||
|
class OrgDataYaml(TypedDict):
    """Raw shape of a single ``orgs`` entry in the config YAML."""

    # Organisation identifier (required).
    org: str
    # Optional subdomain used to build the public hostname.
    url: NotRequired[str]
|
||||||
35
src/docker_compose/compose/dest_path.py
Normal file
35
src/docker_compose/compose/dest_path.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Self, final
|
||||||
|
|
||||||
|
|
||||||
|
@final
|
||||||
|
@dataclass(frozen=True, slots=True)
|
||||||
|
class DestPaths:
|
||||||
|
data_dir: Path
|
||||||
|
env_file: Path
|
||||||
|
compose_file: Path
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_path(cls, path: Path) -> Self:
|
||||||
|
return cls(
|
||||||
|
path,
|
||||||
|
path.joinpath(".env"),
|
||||||
|
path.joinpath("docker-docker_compose.yml"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# @staticmethod
|
||||||
|
# def _mk_dir(path: Path) -> None:
|
||||||
|
# if path.exists():
|
||||||
|
# return
|
||||||
|
# path.mkdir(parents=True)
|
||||||
|
|
||||||
|
def mk_compose_dir(self) -> None:
|
||||||
|
if self.data_dir.exists():
|
||||||
|
return
|
||||||
|
self.data_dir.mkdir(parents=True)
|
||||||
|
# vols = self.bind_vols
|
||||||
|
# if vols is None:
|
||||||
|
# return
|
||||||
|
# for vol in vols:
|
||||||
|
# _mk_dir(vol)
|
||||||
28
src/docker_compose/compose/net_args.py
Normal file
28
src/docker_compose/compose/net_args.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import final
|
||||||
|
|
||||||
|
from docker_compose.compose.net_args_yaml import NetArgsYaml
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class NetArgs:
    """Arguments describing one compose network."""

    name: str
    # Tri-state: None means "omit the key from the YAML output".
    external: bool | None

    @property
    def as_dict(self) -> NetArgsYaml:
        """Serializable mapping; ``external`` is included only when set."""
        result = NetArgsYaml(name=self.name)
        if self.external is None:
            return result
        result["external"] = self.external
        return result

    @staticmethod
    def is_proxy_check(name: str) -> bool:
        """True when *name* follows the ``*proxy`` naming convention."""
        return name.endswith("proxy")

    @property
    def is_proxy(self) -> bool:
        """Whether this network is a proxy network (by naming convention)."""
        return NetArgs.is_proxy_check(self.name)
|
||||||
6
src/docker_compose/compose/net_args_yaml.py
Normal file
6
src/docker_compose/compose/net_args_yaml.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from typing import NotRequired, TypedDict
|
||||||
|
|
||||||
|
|
||||||
|
class NetArgsYaml(TypedDict):
    """Raw YAML shape of one compose network definition."""

    name: str
    # Only present when the network is marked external/internal explicitly.
    external: NotRequired[bool]
|
||||||
39
src/docker_compose/compose/render.py
Normal file
39
src/docker_compose/compose/render.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
from collections.abc import Iterator
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import final
|
||||||
|
|
||||||
|
from docker_compose.compose.compose import Compose
|
||||||
|
from docker_compose.compose.replace_args import ReplaceArgs
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class Rendered(Compose):
    """A Compose document plus the replacement contexts to render it with.

    NOTE(review): ``self.replace_args``, ``self.services``, ``self.proxys``
    and ``self.as_yaml`` come from the unseen ``Compose`` base — confirm
    their types against that class.
    """

    def mk_bind_vols(self) -> None:
        """Create host directories for absolute bind-mount volume sources.

        For every service volume "src:dst", renders the src part once per
        replacement context and mkdir-s it when it is an absolute path that
        does not yet exist. Relative sources (named volumes) are skipped.
        """
        for app_data in self.services.values():
            if app_data.volumes is None:
                continue
            for vol in app_data.volumes:
                for arg in self.replace_args:
                    # Host side of "host:container" with placeholders resolved.
                    path = arg.render_yaml(vol.split(":", 1)[0])
                    if not path.startswith("/"):
                        continue
                    # Rebind str -> Path for filesystem operations.
                    path = Path(path)
                    if path.exists():
                        continue
                    path.mkdir(parents=True)

    @property
    def proxy_nets(self) -> Iterator[str]:
        """Yield each proxy network name rendered per replacement context."""
        for net in self.proxys:
            for re in self.replace_args:
                yield re.org_name.replace(net)

    def write_all(self) -> None:
        """Ensure bind-mount dirs exist, then write one compose file per context."""
        self.mk_bind_vols()
        for arg in self.replace_args:
            arg.write_yaml(self.as_yaml)

    def write(self, args: ReplaceArgs) -> None:
        """Write the rendered compose file for a single context."""
        args.write_yaml(self.as_yaml)
|
||||||
73
src/docker_compose/compose/replace_args.py
Normal file
73
src/docker_compose/compose/replace_args.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from functools import reduce
|
||||||
|
from shutil import copyfile
|
||||||
|
from typing import Self, final
|
||||||
|
|
||||||
|
from docker_compose.cfg import DATA_ROOT
|
||||||
|
from docker_compose.cfg.cfg_paths import CfgData
|
||||||
|
from docker_compose.cfg.org_data import OrgData
|
||||||
|
from docker_compose.compose.dest_path import DestPaths
|
||||||
|
from docker_compose.compose.val_obj import (
|
||||||
|
DataDir,
|
||||||
|
NameVal,
|
||||||
|
OrgVal,
|
||||||
|
Record,
|
||||||
|
Url,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class ReplaceArgs:
    """All placeholder Records needed to render one org's compose output."""

    cfg: CfgData
    org: Record[OrgVal]
    name: Record[NameVal]
    org_name: Record[NameVal]
    data: Record[DataDir]
    url: Record[Url]
    dest_paths: DestPaths

    # noinspection PyMissingTypeHints
    def __iter__(self):
        """Iterate the substitutable Records (cfg/dest_paths excluded)."""
        yield self.org
        yield self.name
        yield self.org_name
        yield self.data
        yield self.url

    @classmethod
    def from_cfg_data(cls, cfg_data: CfgData, org_data: OrgData) -> Self:
        """Derive every Record and the destination layout for one org."""
        _org = OrgVal(org_data.org)
        _name = NameVal(cfg_data.name)
        # Qualified name "org_name" only when an explicit org is configured.
        org_name = NameVal(f"{_org.str}_{_name.str}") if _org.is_valid() else _name
        data_dir = DATA_ROOT.joinpath(_org.str, _name.str)

        return cls(
            cfg_data,
            Record("org", _org),
            Record("name", _name),
            Record("org_name", org_name),
            Record("data", DataDir(data_dir)),
            Record("url", Url(org_data.url)),
            DestPaths.from_path(data_dir),
        )

    def mk_compose_env(self) -> None:
        """Copy the source .env to the destination, never overwriting."""
        src = self.cfg.src_paths.env_file
        dest = self.dest_paths.env_file
        if src.exists() and not dest.exists():
            _ = copyfile(src, dest)

    def render_yaml(self, yaml: str) -> str:
        """Apply every Record's placeholder substitution to *yaml* in turn."""
        return reduce(lambda s, f: f.replace(s), self, yaml)

    def write_yaml(self, yaml: str) -> None:
        """Render *yaml* and write it to the destination compose file."""
        self.dest_paths.mk_compose_dir()
        with self.dest_paths.compose_file.open("wt") as f:
            _ = f.write(self.render_yaml(yaml))

    # def mk_vol_dir(self, path: str):
    #     p = Path(self.render_yaml(path))
    #     if p.exists():
    #         return
    #     p.mkdir(parents=True)
|
||||||
80
src/docker_compose/compose/service.py
Normal file
80
src/docker_compose/compose/service.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
from abc import ABCMeta
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Self, final
|
||||||
|
|
||||||
|
from docker_compose.compose.service_yaml_read import (
|
||||||
|
ServiceYamlRead,
|
||||||
|
)
|
||||||
|
from docker_compose.compose.service_yaml_write import (
|
||||||
|
ServiceYamlWrite,
|
||||||
|
ServiceYamlWriteData,
|
||||||
|
)
|
||||||
|
from docker_compose.compose.val_obj import Record
|
||||||
|
from docker_compose.Ts import T_Primitive
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class Service(metaclass=ABCMeta):
    """Normalized, immutable representation of one compose service.

    NOTE(review): the class is @final with no abstract methods, so the
    ABCMeta metaclass is currently inert — confirm whether it is intentional.
    """

    command: tuple[str, ...] | None
    container_name: str
    entrypoint: tuple[str, ...] | None
    environment: dict[str, T_Primitive] | None
    image: str
    labels: frozenset[str] | None
    logging: dict[str, str] | None
    networks: frozenset[str] | None
    restart: str
    security_opt: frozenset[str]
    user: str | None
    volumes: frozenset[str] | None

    @classmethod
    def from_path(cls, path: Path) -> Self:
        """Load a service YAML file and normalize it."""
        return cls.from_dict(ServiceYamlRead.from_path(path))

    @classmethod
    def from_dict(cls, data: ServiceYamlRead) -> Self:
        """Normalize a raw service mapping.

        Mutable YAML lists become tuples/frozensets; ``container_name`` is
        set to the org_name placeholder token; ``restart`` is forced to
        "unless-stopped".
        """
        command = data.data.get("command")
        entrypoint = data.data.get("entrypoint")
        volumes = data.data.get("volumes")
        nets = data.data.get("networks")
        # Positional order must match the field declaration order above.
        return cls(
            None if not command else tuple(command),
            Record.get_replace_name("org_name"),
            tuple(entrypoint) if entrypoint else None,
            data.data.get("environment"),
            data.data["image"],
            data.labels,
            data.data.get("logging"),
            frozenset(nets) if nets else None,
            "unless-stopped",
            data.sec_opts,
            data.data.get("user"),
            frozenset(volumes) if volumes else None,
        )

    @property
    def as_dict(self) -> ServiceYamlWrite:
        """Serialize back to YAML shape, omitting None fields and sorting sets."""
        data = ServiceYamlWriteData(
            container_name=self.container_name,
            image=self.image,
            restart=self.restart,
            security_opt=sorted(self.security_opt),
        )
        if self.command is not None:
            data["command"] = list(self.command)
        if self.entrypoint is not None:
            data["entrypoint"] = list(self.entrypoint)
        if self.environment is not None:
            data["environment"] = self.environment
        if self.labels is not None:
            data["labels"] = sorted(self.labels)
        if self.logging is not None:
            data["logging"] = self.logging
        if self.user is not None:
            data["user"] = self.user
        if self.volumes is not None:
            data["volumes"] = sorted(self.volumes)
        return ServiceYamlWrite(data)
|
||||||
60
src/docker_compose/compose/service_yaml_read.py
Normal file
60
src/docker_compose/compose/service_yaml_read.py
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Literal, NotRequired, TypedDict
|
||||||
|
|
||||||
|
from docker_compose.compose.val_obj import Record
|
||||||
|
from docker_compose.Ts import T_Primitive
|
||||||
|
from docker_compose.yaml import YamlWrapper
|
||||||
|
|
||||||
|
# NOTE(review): the Literal arm is absorbed by `str`, so this alias is
# effectively just `str` — confirm whether it is documentation-only.
type T_NetAbc = str | Literal["proxy", "internal"]


class ServiceYamlReadData(TypedDict):
    """Raw YAML shape of a service definition as read from disk.

    Only ``image`` is required; everything else is optional.
    """

    command: NotRequired[list[str]]
    entrypoint: NotRequired[list[str]]
    environment: NotRequired[dict[str, T_Primitive]]
    image: str
    labels: NotRequired[list[str]]
    logging: NotRequired[dict[str, str]]
    networks: NotRequired[list[str]]
    security_opt: NotRequired[list[str]]
    user: NotRequired[str]
    volumes: NotRequired[list[str]]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
class ServiceYamlRead(YamlWrapper[ServiceYamlReadData]):
    """Read-side wrapper over a raw service YAML mapping."""

    @property
    def sec_opts(self) -> frozenset[str]:
        """Security options, always including ``no-new-privileges:true``.

        Bug fix: the original called ``frozenset("no-new-privileges:true",)``,
        which is frozenset of a single *string* argument and therefore yields
        the set of its individual characters. The string must be wrapped in a
        tuple to get a one-element set.
        """
        sec_opts = frozenset(("no-new-privileges:true",))
        sec = self.data.get("security_opt")
        if not sec:
            return sec_opts
        return sec_opts.union(sec)

    @property
    def labels(self) -> frozenset[str] | None:
        """Labels with traefik routing labels added when traefik is enabled.

        Returns None when the service defines no labels at all. The traefik
        label set is only built when needed (hoisted behind the early
        returns; the original computed it unconditionally).
        """
        labels = self.data.get("labels")
        if not labels:
            return None
        if "traefik.enable=true" not in labels:
            return frozenset(labels)
        # Placeholder tokens are substituted later by ReplaceArgs.
        org_name = Record.get_replace_name("org_name")
        url = Record.get_replace_name("url")
        traefik_labels = frozenset(
            (
                f"traefik.http.routers.{org_name}.rule=Host(`{url}`)",
                f"traefik.http.routers.{org_name}.entrypoints=websecure",
                f"traefik.docker.network={org_name}_proxy",
                f"traefik.http.routers.{org_name}.tls.certresolver=le",
            )
        )
        return traefik_labels.union(labels)

    @property
    def nets(self) -> frozenset[str] | None:
        """Declared networks as a frozenset, or None when absent."""
        nets = self.data.get("networks")
        if nets is None:
            return None
        return frozenset(nets)
|
||||||
15
src/docker_compose/compose/service_yaml_write.py
Normal file
15
src/docker_compose/compose/service_yaml_write.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import NotRequired
|
||||||
|
|
||||||
|
from docker_compose.compose.service_yaml_read import ServiceYamlReadData
|
||||||
|
from docker_compose.yaml import YamlWrapper
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceYamlWriteData(ServiceYamlReadData):
    """Write-side YAML shape: read shape plus generated fields."""

    # Injected by Service normalization (placeholder token).
    container_name: NotRequired[str]
    # Forced to "unless-stopped" by Service normalization.
    restart: NotRequired[str]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
class ServiceYamlWrite(YamlWrapper[ServiceYamlWriteData]):
    """Write-side wrapper; behavior comes entirely from YamlWrapper."""

    pass
|
||||||
81
src/docker_compose/compose/val_obj.py
Normal file
81
src/docker_compose/compose/val_obj.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import final, override
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
class RecordVal(metaclass=ABCMeta):
    """Interface for values substitutable into placeholder tokens."""

    @property
    @abstractmethod
    def str(self) -> str:
        """The substitution text for this value."""
        pass
|
||||||
|
|
||||||
|
|
||||||
|
@final
|
||||||
|
@dataclass(frozen=True, slots=True)
|
||||||
|
class Record[T: RecordVal]:
|
||||||
|
name: str
|
||||||
|
val: T
|
||||||
|
|
||||||
|
def replace(self, string: str) -> str:
|
||||||
|
return string.replace(self.replace_name, self.val.str)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def replace_name(self) -> str:
|
||||||
|
return self.get_replace_name(self.name)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_replace_name(string: str) -> str:
|
||||||
|
return f"${{_{string.upper()}}}"
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class OrgVal(RecordVal):
    """Organisation name; ``None`` means the personal/default org."""

    val: str | None

    @property
    @override
    def str(self) -> str:
        """The org name, falling back to "personal" when unset."""
        return "personal" if self.val is None else self.val

    def is_valid(self) -> bool:
        """True when an explicit org name was configured."""
        return self.val is not None
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class NameVal(RecordVal):
    """Plain string value for name-like placeholders."""

    val: str

    @property
    @override
    def str(self) -> str:
        """The wrapped name, verbatim."""
        name = self.val
        return name
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class DataDir(RecordVal):
    """Filesystem path value for the data-directory placeholder."""

    path: Path

    @property
    @override
    def str(self) -> str:
        """The path rendered as a plain string."""
        return f"{self.path}"
|
||||||
|
|
||||||
|
|
||||||
|
@final
@dataclass(frozen=True, slots=True)
class Url(RecordVal):
    """Public hostname built from an optional subdomain."""

    sub_url: str | None

    @property
    @override
    def str(self) -> str:
        """Full host "<sub>.ccamper7.net", or "" when no subdomain is set.

        NOTE(review): the base domain is hard-coded here.
        """
        if self.sub_url is None:
            return ""
        parts = (self.sub_url, "ccamper7", "net")
        return ".".join(parts)
|
||||||
25
src/docker_compose/compose/volumes_yaml.py
Normal file
25
src/docker_compose/compose/volumes_yaml.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import final
|
||||||
|
|
||||||
|
from docker_compose.Ts import T_YamlDict
|
||||||
|
from docker_compose.yaml import YamlWrapper
|
||||||
|
|
||||||
|
# One named volume definition per key.
type VolYamlData = dict[str, T_YamlDict]


@final
@dataclass(frozen=True, slots=True)
class VolYaml(YamlWrapper[VolYamlData]):
    """Wrapper over a volumes YAML mapping; behavior comes from YamlWrapper."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
# def vols_from_path(path: Path) -> VolYamlData:
|
||||||
|
# return cast(VolYamlData, read_yml(path))
|
||||||
|
|
||||||
|
|
||||||
|
# def vols_yaml_factory(self) -> Iterator[tuple[str, VolDataYaml]]:
|
||||||
|
# vols = self.volumes
|
||||||
|
# if vols is None:
|
||||||
|
# return
|
||||||
|
# for path in vols:
|
||||||
|
# yield path.stem, cast(VolDataYaml, read_yml(path))
|
||||||
86
src/docker_compose/util.py
Normal file
86
src/docker_compose/util.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
from collections.abc import Mapping
|
||||||
|
from typing import Any, cast
|
||||||
|
|
||||||
|
from docker_compose.Ts import T_PrimDict, T_Primitive, T_PrimVal
|
||||||
|
|
||||||
|
|
||||||
|
def merge_dicts[T: Mapping[Any, Any]](dict1: T, dict2: T) -> T:
|
||||||
|
def _merge_dicts(_dict1: T_PrimDict, _dict2: T_PrimDict):
|
||||||
|
s1 = frozenset(_dict1.keys())
|
||||||
|
s2 = frozenset(_dict2.keys())
|
||||||
|
for k in s1.difference(s2):
|
||||||
|
yield k, _dict1[k]
|
||||||
|
for k in s2.difference(s1):
|
||||||
|
yield k, _dict2[k]
|
||||||
|
for k in s1.intersection(s2):
|
||||||
|
v1 = _dict1[k]
|
||||||
|
v2 = _dict2[k]
|
||||||
|
if isinstance(v1, dict) and isinstance(v2, dict):
|
||||||
|
yield k, dict[T_Primitive, T_PrimVal](_merge_dicts(v1, v2))
|
||||||
|
continue
|
||||||
|
if isinstance(v1, list) and isinstance(v2, list):
|
||||||
|
yield k, list(frozenset(v1).union(v2))
|
||||||
|
continue
|
||||||
|
raise Exception("merge error")
|
||||||
|
|
||||||
|
return cast(T, dict(_merge_dicts(dict1, dict2)))
|
||||||
|
|
||||||
|
|
||||||
|
# class T_TypedDict(Protocol):
|
||||||
|
# __required_keys__: ClassVar[frozenset[str]]
|
||||||
|
|
||||||
|
# def keys(self) -> KeysView[str]: ...
|
||||||
|
|
||||||
|
|
||||||
|
# def read_yml(path: Path):
|
||||||
|
# with path.open("rt") as f:
|
||||||
|
# return yaml.safe_load(f)
|
||||||
|
|
||||||
|
|
||||||
|
# def to_yaml(data: T_YamlDict) -> str:
|
||||||
|
# _yaml = yaml.dump(data, Dumper=VerboseSafeDumper)
|
||||||
|
# return re.sub(r"(^\s*-)", r" \g<1>", _yaml, flags=re.MULTILINE)
|
||||||
|
|
||||||
|
|
||||||
|
# def get_replace_name(name: str) -> str:
|
||||||
|
# return f"${{_{name.upper()}}}"
|
||||||
|
|
||||||
|
|
||||||
|
# def validate_typed_dict(
|
||||||
|
# # typed_dict: type[T_TypedDict],
|
||||||
|
# data: T_TypedDict,
|
||||||
|
# path: Path | None = None,
|
||||||
|
# pre: tuple[str, ...] | None = None,
|
||||||
|
# ) -> None:
|
||||||
|
# req = type(data).__required_keys__.difference(data.keys())
|
||||||
|
# if not req:
|
||||||
|
# return
|
||||||
|
# if pre is None:
|
||||||
|
# keys = (f'"{key}"' for key in req)
|
||||||
|
# else:
|
||||||
|
# key_pre = ".".join(pre)
|
||||||
|
# keys = (f'"{key_pre}.{key}"' for key in req)
|
||||||
|
# msg = f"key(s) ({', '.join(keys)}) not found"
|
||||||
|
# if path is not None:
|
||||||
|
# msg = f"{msg} in file {path!s}"
|
||||||
|
# print(msg)
|
||||||
|
# raise KeyError
|
||||||
|
|
||||||
|
|
||||||
|
# def to_typed_dict[T:T_TypedDict](typed_dict:type[T] ,data: Mapping[str, Any]) -> T:
|
||||||
|
# missing = typed_dict.__required_keys__.difference(data)
|
||||||
|
# if missing:
|
||||||
|
# msg = f"key(s) ({', '.join(map("{}".format, missing))}) not found"
|
||||||
|
# raise KeyError(msg)
|
||||||
|
# _dict = typed_dict()
|
||||||
|
# for key in typed_dict.__required_keys__:
|
||||||
|
# val = data[key]
|
||||||
|
# if not isinstance(val, typed_dict.__annotations__[key]):
|
||||||
|
# msg = f'invalid type for {type(data).__name__}[{key}]\nexpected {typed_dict.__annotations__[key]} got {type(val).__name__}'
|
||||||
|
# raise TypeError()
|
||||||
|
# _dict[key] = val
|
||||||
|
# for key, key_type in BackupData.__annotations__.items():
|
||||||
|
# if key not in data:
|
||||||
|
# raise ValueError(f"Key: {key} is not available in data.")
|
||||||
|
# result[key] = key_type(data[key])
|
||||||
|
# return result
|
||||||
42
src/docker_compose/yaml.py
Normal file
42
src/docker_compose/yaml.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import re
|
||||||
|
from collections.abc import ItemsView, Iterator, KeysView
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import ClassVar, Protocol, cast, override
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
|
||||||
|
class ProtoMapping[K, V: object](Protocol):
    """Structural protocol for the read-only mapping surface YamlWrapper needs.

    Lets TypedDict instances (which are not Mapping subclasses at type-check
    time) satisfy the YamlWrapper type parameter.
    """

    def __getitem__(self, key: K, /) -> V: ...
    def __iter__(self) -> Iterator[K]: ...
    def __len__(self) -> int: ...
    def __contains__(self, key: object, /) -> bool: ...
    def keys(self) -> KeysView[K]: ...
    def items(self) -> ItemsView[K, V]: ...
|
||||||
|
|
||||||
|
|
||||||
|
class TTypedyamldict(ProtoMapping[str, object], Protocol):
    """Structural stand-in for a TypedDict: string-keyed mapping that also
    exposes the TypedDict introspection attributes."""

    __required_keys__: ClassVar[frozenset[str]]
    __optional_keys__: ClassVar[frozenset[str]]
|
||||||
|
|
||||||
|
|
||||||
|
class VerboseSafeDumper(yaml.SafeDumper):
    """SafeDumper that never emits YAML anchors/aliases: repeated objects
    are dumped in full every time."""

    @override
    def ignore_aliases(self, data: object) -> bool:
        # Returning True for every node disables &anchor/*alias output.
        return True
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, slots=True)
class YamlWrapper[T: ProtoMapping[str, object]]:
    """Generic immutable wrapper pairing a parsed YAML mapping with helpers."""

    # The parsed document; typed via the T parameter, cast on load.
    data: T

    @classmethod
    def from_path(cls, path: Path):
        """Parse the YAML file at *path* and wrap it.

        The cast is unchecked — the file is trusted to match T's shape.
        """
        with path.open("rt") as f:
            return cls(cast(T, yaml.safe_load(f)))

    @property
    def as_yaml(self) -> str:
        """Dump back to YAML text (no anchors), indenting list dashes.

        The regex prefixes each line-leading "-" with two spaces so list
        items appear indented under their key.
        """
        _yaml = yaml.dump(self.data, Dumper=VerboseSafeDumper)
        return re.sub(r"(^\s*-)", r"  \g<1>", _yaml, flags=re.MULTILINE)
|
||||||
Reference in New Issue
Block a user