diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..e28a74d
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,5 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/.idea/compose_gen_uv.iml b/.idea/compose_gen_uv.iml
new file mode 100644
index 0000000..5ecfebe
--- /dev/null
+++ b/.idea/compose_gen_uv.iml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/dictionaries/project.xml b/.idea/dictionaries/project.xml
new file mode 100644
index 0000000..8dfa517
--- /dev/null
+++ b/.idea/dictionaries/project.xml
@@ -0,0 +1,7 @@
+
+
+
+ traefik
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..d3af53a
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..449e696
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..a7baea3
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..479437f
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/ruff.xml b/.idea/ruff.xml
new file mode 100644
index 0000000..0dad89a
--- /dev/null
+++ b/.idea/ruff.xml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..0faa797
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/compose/__init__.py b/src/compose/__init__.py
deleted file mode 100644
index 1329af5..0000000
--- a/src/compose/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from collections.abc import Iterable, Iterator
-
-from compose.cfg import CFG_ROOT, TRAEFIK_PATH
-from compose.cfg.factory import cfg_data_factory
-from compose.compose.factory import compose_factory
-from compose.rendered.entity import Rendered
-from compose.rendered.factory import rendered_factory
-from compose.rendered.util import write
-from compose.src_path.entity import src_paths_factory
-from compose.template.factory import template_factory
-
-
-def load_all() -> Iterable[Rendered]:
- for dir in CFG_ROOT.iterdir():
- paths = src_paths_factory(dir)
- cfg = cfg_data_factory(paths)
- parsed = compose_factory(cfg)
- for template in template_factory(parsed):
- yield rendered_factory(template)
-
-
-def render_all() -> Iterator[Rendered]:
- for rendered in load_all():
- write(rendered)
- yield rendered
-
-
-if __name__ == "__main__":
- renders = render_all()
- src_paths = src_paths_factory(TRAEFIK_PATH)
- cfg_data = cfg_data_factory(src_paths)
- traefik = compose_factory(cfg_data)
- for template in template_factory(traefik):
- rendered = rendered_factory(template)
- write(rendered)
diff --git a/src/compose/cfg/__init__.py b/src/compose/cfg/__init__.py
deleted file mode 100644
index f91132f..0000000
--- a/src/compose/cfg/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from collections.abc import Mapping
-from pathlib import Path
-
-type nested_list = list[str | nested_list]
-type T_Primitive = bool | int | str
-type T_PrimVal = T_Primitive | list[T_Primitive] | T_PrimDict
-type T_PrimDict = Mapping[T_Primitive, T_PrimVal]
-type T_YamlVals = T_Primitive | list[T_Primitive | T_YamlDict] | T_YamlDict
-type T_YamlDict = Mapping[str, T_YamlVals]
-
-CFG_ROOT = Path("/data/cfg")
-DATA_ROOT = Path("/data")
-TRAEFIK_PATH = Path("/data/traefik")
-
-# TCo_YamlVals = TypeVar(
-# "TCo_YamlVals",
-# bound=T_Primitive | list[T_Primitive | T_YamlDict] | T_YamlDict,
-# covariant=True,
-# )
-# type TCo_YamlDict = dict[str, TCo_YamlVals]
-
-# TCo_YamlDict = TypeVar("TCo_YamlDict", bound=dict[str, T_YamlVals], covariant=True)
-
-
-# class HasServices(TypedDict):
-# services: dict[str, ComposeService]
diff --git a/src/compose/util.py b/src/compose/util.py
deleted file mode 100644
index 0bd9c85..0000000
--- a/src/compose/util.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import re
-from collections.abc import KeysView, Mapping
-from pathlib import Path
-from typing import Any, ClassVar, Protocol, cast, override
-
-import yaml
-
-from compose.cfg import T_PrimDict, T_Primitive, T_PrimVal, T_YamlDict
-
-
-class VerboseSafeDumper(yaml.SafeDumper):
- @override
- def ignore_aliases(self, data: Any) -> bool: # pyright: ignore[reportExplicitAny, reportAny]
- return True
-
-
-def merge_dicts[T: Mapping[Any, Any]](dict1: T, dict2: T) -> T:
- def _merge_dicts(dict1: T_PrimDict, dict2: T_PrimDict):
- s1 = frozenset(dict1.keys())
- s2 = frozenset(dict2.keys())
- for k in s1.difference(s2):
- yield k, dict1[k]
- for k in s2.difference(s1):
- yield k, dict2[k]
- for k in s1.intersection(s2):
- v1 = dict1[k]
- v2 = dict2[k]
- if isinstance(v1, dict) and isinstance(v2, dict):
- yield k, dict[T_Primitive, T_PrimVal](_merge_dicts(v1, v2))
- continue
- if isinstance(v1, list) and isinstance(v2, list):
- yield k, list(frozenset(v1).union(v2))
- continue
- raise Exception("merge error")
-
- return cast(T, dict(_merge_dicts(dict1, dict2)))
-
-
-def read_yml(path: Path) -> T_YamlDict:
- with path.open("rt") as f:
- return cast(T_YamlDict, yaml.safe_load(f))
-
-
-def to_yaml(data: T_YamlDict) -> str:
- _yaml = yaml.dump(data, Dumper=VerboseSafeDumper)
- return re.sub(r"(^\s*-)", r" \g<1>", _yaml, flags=re.MULTILINE)
-
-
-def get_replace_name(name: str) -> str:
- return f"${{_{name.upper()}}}"
-
-
-class T_TypedDict(Protocol):
- __required_keys__: ClassVar[frozenset[str]]
-
- def keys(self) -> KeysView[str]: ...
-
-
-def validate_typed_dict(
- typed_dict: type[T_TypedDict],
- data: T_TypedDict,
- path: Path | None = None,
- pre: tuple[str, ...] | None = None,
-) -> None:
- req = typed_dict.__required_keys__.difference(data.keys())
- if not req:
- return
- if pre is None:
- keys = (f'"{key}"' for key in req)
- else:
- key_pre = ".".join(pre)
- keys = (f'"{key_pre}.{key}"' for key in req)
- msg = f"key(s) ({', '.join(keys)}) not found"
- if path is not None:
- msg = f"{msg} in file {path!s}"
- print(msg)
- raise KeyError
diff --git a/src/docker_compose/__init__.py b/src/docker_compose/__init__.py
new file mode 100644
index 0000000..1429728
--- /dev/null
+++ b/src/docker_compose/__init__.py
@@ -0,0 +1,21 @@
+from collections.abc import Iterable, Iterator
+
+from docker_compose.cfg import CFG_ROOT, TRAEFIK_PATH
+from docker_compose.compose.render import Rendered
+
+
+def load_all() -> Iterable[Rendered]:
+ for _dir in CFG_ROOT.iterdir():
+ yield Rendered.from_path(_dir)
+
+
+def render_all() -> Iterator[str]:
+ for rendered in load_all():
+ rendered.write_all()
+ yield from rendered.proxy_nets
+
+
+if __name__ == "__main__":
+ # renders = render_all()
+ nets = frozenset(render_all())
+ traefik = Rendered.from_path(TRAEFIK_PATH)
diff --git a/src/docker_compose/cfg/__init__.py b/src/docker_compose/cfg/__init__.py
new file mode 100644
index 0000000..e427e1c
--- /dev/null
+++ b/src/docker_compose/cfg/__init__.py
@@ -0,0 +1,5 @@
+from pathlib import Path
+
+CFG_ROOT = Path("/data/cfg")
+DATA_ROOT = Path("/data")
+TRAEFIK_PATH = Path("/data/traefik")
diff --git a/src/docker_compose/util.py b/src/docker_compose/util.py
new file mode 100644
index 0000000..71b0154
--- /dev/null
+++ b/src/docker_compose/util.py
@@ -0,0 +1,86 @@
+from collections.abc import Mapping
+from typing import Any, cast
+
+from docker_compose.Ts import T_PrimDict, T_Primitive, T_PrimVal
+
+
+def merge_dicts[T: Mapping[Any, Any]](dict1: T, dict2: T) -> T:
+ def _merge_dicts(_dict1: T_PrimDict, _dict2: T_PrimDict):
+ s1 = frozenset(_dict1.keys())
+ s2 = frozenset(_dict2.keys())
+ for k in s1.difference(s2):
+ yield k, _dict1[k]
+ for k in s2.difference(s1):
+ yield k, _dict2[k]
+ for k in s1.intersection(s2):
+ v1 = _dict1[k]
+ v2 = _dict2[k]
+ if isinstance(v1, dict) and isinstance(v2, dict):
+ yield k, dict[T_Primitive, T_PrimVal](_merge_dicts(v1, v2))
+ continue
+ if isinstance(v1, list) and isinstance(v2, list):
+ yield k, list(frozenset(v1).union(v2))
+ continue
+ raise Exception("merge error")
+
+ return cast(T, dict(_merge_dicts(dict1, dict2)))
+
+
+# class T_TypedDict(Protocol):
+# __required_keys__: ClassVar[frozenset[str]]
+
+# def keys(self) -> KeysView[str]: ...
+
+
+# def read_yml(path: Path):
+# with path.open("rt") as f:
+# return yaml.safe_load(f)
+
+
+# def to_yaml(data: T_YamlDict) -> str:
+# _yaml = yaml.dump(data, Dumper=VerboseSafeDumper)
+# return re.sub(r"(^\s*-)", r" \g<1>", _yaml, flags=re.MULTILINE)
+
+
+# def get_replace_name(name: str) -> str:
+# return f"${{_{name.upper()}}}"
+
+
+# def validate_typed_dict(
+# # typed_dict: type[T_TypedDict],
+# data: T_TypedDict,
+# path: Path | None = None,
+# pre: tuple[str, ...] | None = None,
+# ) -> None:
+# req = type(data).__required_keys__.difference(data.keys())
+# if not req:
+# return
+# if pre is None:
+# keys = (f'"{key}"' for key in req)
+# else:
+# key_pre = ".".join(pre)
+# keys = (f'"{key_pre}.{key}"' for key in req)
+# msg = f"key(s) ({', '.join(keys)}) not found"
+# if path is not None:
+# msg = f"{msg} in file {path!s}"
+# print(msg)
+# raise KeyError
+
+
+# def to_typed_dict[T:T_TypedDict](typed_dict:type[T] ,data: Mapping[str, Any]) -> T:
+# missing = typed_dict.__required_keys__.difference(data)
+# if missing:
+# msg = f"key(s) ({', '.join(map("{}".format, missing))}) not found"
+# raise KeyError(msg)
+# _dict = typed_dict()
+# for key in typed_dict.__required_keys__:
+# val = data[key]
+# if not isinstance(val, typed_dict.__annotations__[key]):
+# msg = f'invalid type for {type(data).__name__}[{key}]\nexpected {typed_dict.__annotations__[key]} got {type(val).__name__}'
+# raise TypeError()
+# _dict[key] = val
+# for key, key_type in BackupData.__annotations__.items():
+# if key not in data:
+# raise ValueError(f"Key: {key} is not available in data.")
+# result[key] = key_type(data[key])
+# return result