TOML support.

This commit is contained in:
Sleepy Monax 2023-11-15 15:45:43 +01:00
parent 83f56dbf8d
commit 5b703d779a
8 changed files with 147 additions and 126 deletions

View file

@ -22,6 +22,7 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: | run: |
sudo apt install -y ruff
python -m pip install --upgrade pip python -m pip install --upgrade pip
python -m pip install -r requirements.txt python -m pip install -r requirements.txt
python -m pip install mypy pytest python -m pip install mypy pytest
@ -33,3 +34,7 @@ jobs:
- name: Run PyTest - name: Run PyTest
run: | run: |
python -m pytest python -m pytest
- name: Run Ruff
run: |
ruff check cutekit

View file

@ -36,15 +36,15 @@ def ensure(version: tuple[int, int, int]):
def setupLogger(verbose: bool): def setupLogger(verbose: bool):
if verbose: if verbose:
logging.basicConfig( logging.basicConfig(
level=logging.INFO, level=logging.DEBUG,
format=f"{vt100.CYAN}%(asctime)s{vt100.RESET} {vt100.YELLOW}%(levelname)s{vt100.RESET} %(name)s: %(message)s", format=f"{vt100.CYAN}%(asctime)s{vt100.RESET} {vt100.YELLOW}%(levelname)s{vt100.RESET} %(name)s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S", datefmt="%Y-%m-%d %H:%M:%S",
) )
else: else:
projectRoot = model.Project.root() projectRoot = model.Project.topmost()
logFile = const.GLOBAL_LOG_FILE logFile = const.GLOBAL_LOG_FILE
if projectRoot is not None: if projectRoot is not None:
logFile = os.path.join(projectRoot, const.PROJECT_LOG_FILE) logFile = os.path.join(projectRoot.dirname(), const.PROJECT_LOG_FILE)
# create the directory if it doesn't exist # create the directory if it doesn't exist
logDir = os.path.dirname(logFile) logDir = os.path.dirname(logFile)

View file

@ -1,7 +1,8 @@
import os import os
import logging import logging
import dataclasses as dt
from pathlib import Path from pathlib import Path
from dataclasses import dataclass
from typing import TextIO, Union from typing import TextIO, Union
from . import shell, rules, model, ninja, const, cli from . import shell, rules, model, ninja, const, cli
@ -187,7 +188,7 @@ def gen(out: TextIO, target: model.Target, registry: model.Registry):
all(w, registry, target) all(w, registry, target)
@dataclass @dt.dataclass
class Product: class Product:
path: Path path: Path
target: model.Target target: model.Target

View file

@ -1,10 +1,10 @@
import inspect import inspect
import logging import logging
import sys import sys
import dataclasses as dt
from pathlib import Path from pathlib import Path
from typing import Optional, Union, Callable from typing import Optional, Union, Callable
from dataclasses import dataclass
from . import const, vt100 from . import const, vt100
@ -72,7 +72,7 @@ def parse(args: list[str]) -> Args:
Callback = Callable[[Args], None] Callback = Callable[[Args], None]
@dataclass @dt.dataclass
class Command: class Command:
shortName: Optional[str] shortName: Optional[str]
longName: str longName: str

View file

@ -1,17 +1,20 @@
import os import os
import json import json
import re
import tomllib
from pathlib import Path from pathlib import Path
from typing import Any, cast, Callable, Final from typing import Any, Optional, cast, Callable, Final
from . import shell, compat from . import shell
Json = Any Json = Any
Builtin = Callable[..., Json] Builtin = Callable[..., Json]
BUILTINS: Final[dict[str, Builtin]] = { BUILTINS: Final[dict[str, Builtin]] = {
"uname": lambda arg, ctx: getattr(shell.uname(), arg).lower(), "uname": lambda arg, ctx: getattr(shell.uname(), arg).lower(),
"include": lambda arg, ctx: evalRead(arg), "include": lambda arg, ctx: evalRead(Path(arg)),
"evalRead": lambda arg, ctx: evalRead(arg), "evalRead": lambda arg, ctx: evalRead(Path(arg)),
"join": lambda lhs, rhs, ctx: cast( "join": lambda lhs, rhs, ctx: cast(
Json, {**lhs, **rhs} if isinstance(lhs, dict) else lhs + rhs Json, {**lhs, **rhs} if isinstance(lhs, dict) else lhs + rhs
), ),
@ -50,12 +53,26 @@ def eval(jexpr: Json, filePath: Path) -> Json:
return jexpr return jexpr
def extraSchema(toml: str) -> Optional[str]:
schemaRegex = re.compile(r"#:schema\s+(.*)")
schema = schemaRegex.search(toml)
return schema.group(1) if schema else None
def read(path: Path) -> Json: def read(path: Path) -> Json:
try: try:
with open(path, "r") as f: with open(path, "r") as f:
if path.suffix == ".toml":
tomlStr = f.read()
toml = tomllib.loads(tomlStr)
schema = extraSchema(tomlStr)
if schema:
toml["$schema"] = schema
return toml
else:
return json.load(f) return json.load(f)
except: except Exception as e:
raise RuntimeError(f"Failed to read {path}") raise RuntimeError(f"Failed to read {path}: {e}")
def evalRead(path: Path) -> Json: def evalRead(path: Path) -> Json:

View file

@ -1,13 +1,12 @@
import os import os
import logging import logging
import dataclasses as dt
from enum import Enum from enum import Enum
from typing import Any, Generator, Optional, Type, cast from typing import Any, Generator, Optional, Type, cast
from pathlib import Path from pathlib import Path
from dataclasses_json import DataClassJsonMixin from dataclasses_json import DataClassJsonMixin
import dataclasses
from dataclasses import dataclass, field
from cutekit import const, shell from cutekit import const, shell
@ -29,11 +28,13 @@ class Kind(Enum):
# --- Manifest --------------------------------------------------------------- # # --- Manifest --------------------------------------------------------------- #
@dataclass @dt.dataclass
class Manifest(DataClassJsonMixin): class Manifest(DataClassJsonMixin):
id: str id: str
type: Kind = field(default=Kind.UNKNOWN) type: Kind = dt.field(default=Kind.UNKNOWN)
path: str = field(default="") path: str = dt.field(default="")
SUFFIXES = [".json", ".toml"]
SUFFIXES_GLOBS = ["*.json", "*.toml"]
@staticmethod @staticmethod
def parse(path: Path, data: dict[str, Any]) -> "Manifest": def parse(path: Path, data: dict[str, Any]) -> "Manifest":
@ -47,12 +48,24 @@ class Manifest(DataClassJsonMixin):
obj.path = str(path) obj.path = str(path)
return obj return obj
@staticmethod
def tryLoad(path: Path) -> Optional["Manifest"]:
for suffix in Manifest.SUFFIXES:
pathWithSuffix = path.with_suffix(suffix)
if pathWithSuffix.exists():
_logger.debug(f"Loading manifest from '{pathWithSuffix}'")
return Manifest.parse(pathWithSuffix, jexpr.evalRead(pathWithSuffix))
return None
@staticmethod @staticmethod
def load(path: Path) -> "Manifest": def load(path: Path) -> "Manifest":
""" """
Load a manifest from a given path Load a manifest from a given path
""" """
return Manifest.parse(path, jexpr.evalRead(path)) manifest = Manifest.tryLoad(path)
if manifest is None:
raise RuntimeError(f"Could not find manifest at '{path}'")
return manifest
def dirname(self) -> str: def dirname(self) -> str:
""" """
@ -79,16 +92,16 @@ class Manifest(DataClassJsonMixin):
_project: Optional["Project"] = None _project: Optional["Project"] = None
@dataclass @dt.dataclass
class Extern(DataClassJsonMixin): class Extern(DataClassJsonMixin):
git: str git: str
tag: str tag: str
@dataclass @dt.dataclass
class Project(Manifest): class Project(Manifest):
description: str = field(default="(No description)") description: str = dt.field(default="(No description)")
extern: dict[str, Extern] = field(default_factory=dict) extern: dict[str, Extern] = dt.field(default_factory=dict)
@property @property
def externDirs(self) -> list[str]: def externDirs(self) -> list[str]:
@ -96,47 +109,37 @@ class Project(Manifest):
return list(res) return list(res)
@staticmethod @staticmethod
def root() -> Optional[str]: def topmost() -> Optional["Project"]:
"""
Find the root of the project by looking for a project.json
"""
cwd = Path.cwd() cwd = Path.cwd()
topmost: Optional["Project"] = None
while str(cwd) != cwd.root: while str(cwd) != cwd.root:
if (cwd / "project.json").is_file(): projectManifest = Manifest.tryLoad(cwd / "project")
return str(cwd) if projectManifest is not None:
topmost = projectManifest.ensureType(Project)
cwd = cwd.parent cwd = cwd.parent
return None return topmost
@staticmethod
def chdir() -> None:
"""
Change the current working directory to the root of the project
"""
path = Project.root()
if path is None:
raise RuntimeError(
"No project.json found in this directory or any parent directory"
)
os.chdir(path)
@staticmethod
def at(path: str) -> Optional["Project"]:
path = os.path.join(path, "project.json")
if not os.path.exists(path):
return None
return Manifest.load(Path(path)).ensureType(Project)
@staticmethod @staticmethod
def ensure() -> "Project": def ensure() -> "Project":
root = Project.root() project = Project.topmost()
if root is None: if project is None:
raise RuntimeError( raise RuntimeError(
"No project.json found in this directory or any parent directory" "No project found in this directory or any parent directory"
)
os.chdir(root)
return Manifest.load(Path(os.path.join(root, "project.json"))).ensureType(
Project
) )
return project
def chdir(self):
"""
Change the current working directory to the root of the project
"""
os.chdir(self.dirname())
@staticmethod
def at(path: Path) -> Optional["Project"]:
projectManifest = Manifest.tryLoad(path / "project")
if projectManifest is None:
return None
return projectManifest.ensureType(Project)
@staticmethod @staticmethod
def fetchs(extern: dict[str, Extern]): def fetchs(extern: dict[str, Extern]):
@ -159,7 +162,7 @@ class Project(Manifest):
ext.git, ext.git,
extPath, extPath,
) )
project = Project.at(extPath) project = Project.at(Path(extPath))
if project is not None: if project is not None:
Project.fetchs(project.extern) Project.fetchs(project.extern)
@ -188,6 +191,7 @@ def initCmd(args: cli.Args):
name = args.consumeArg() name = args.consumeArg()
_logger.info("Fetching registry...") _logger.info("Fetching registry...")
r = requests.get(f"https://raw.githubusercontent.com/{repo}/main/registry.json") r = requests.get(f"https://raw.githubusercontent.com/{repo}/main/registry.json")
if r.status_code != 200: if r.status_code != 200:
@ -235,11 +239,11 @@ def initCmd(args: cli.Args):
# --- Target ----------------------------------------------------------------- # # --- Target ----------------------------------------------------------------- #
@dataclass @dt.dataclass
class Tool(DataClassJsonMixin): class Tool(DataClassJsonMixin):
cmd: str = field(default="") cmd: str = dt.field(default="")
args: list[str] = field(default_factory=list) args: list[str] = dt.field(default_factory=list)
files: list[str] = field(default_factory=list) files: list[str] = dt.field(default_factory=list)
Tools = dict[str, Tool] Tools = dict[str, Tool]
@ -249,11 +253,11 @@ DEFAULT_TOOLS: Tools = {
} }
@dataclass @dt.dataclass
class Target(Manifest): class Target(Manifest):
props: Props = field(default_factory=dict) props: Props = dt.field(default_factory=dict)
tools: Tools = field(default_factory=dict) tools: Tools = dt.field(default_factory=dict)
routing: dict[str, str] = field(default_factory=dict) routing: dict[str, str] = dt.field(default_factory=dict)
@property @property
def hashid(self) -> str: def hashid(self) -> str:
@ -283,27 +287,27 @@ class Target(Manifest):
# --- Component -------------------------------------------------------------- # # --- Component -------------------------------------------------------------- #
@dataclass @dt.dataclass
class Resolved: class Resolved:
reason: Optional[str] = None reason: Optional[str] = None
resolved: list[str] = field(default_factory=list) resolved: list[str] = dt.field(default_factory=list)
@property @property
def enabled(self) -> bool: def enabled(self) -> bool:
return self.reason is None return self.reason is None
@dataclass @dt.dataclass
class Component(Manifest): class Component(Manifest):
decription: str = field(default="(No description)") decription: str = dt.field(default="(No description)")
props: Props = field(default_factory=dict) props: Props = dt.field(default_factory=dict)
tools: Tools = field(default_factory=dict) tools: Tools = dt.field(default_factory=dict)
enableIf: dict[str, list[Any]] = field(default_factory=dict) enableIf: dict[str, list[Any]] = dt.field(default_factory=dict)
requires: list[str] = field(default_factory=list) requires: list[str] = dt.field(default_factory=list)
provides: list[str] = field(default_factory=list) provides: list[str] = dt.field(default_factory=list)
subdirs: list[str] = field(default_factory=list) subdirs: list[str] = dt.field(default_factory=list)
injects: list[str] = field(default_factory=list) injects: list[str] = dt.field(default_factory=list)
resolved: dict[str, Resolved] = field(default_factory=dict) resolved: dict[str, Resolved] = dt.field(default_factory=dict)
def isEnabled(self, target: Target) -> tuple[bool, str]: def isEnabled(self, target: Target) -> tuple[bool, str]:
for k, v in self.enableIf.items(): for k, v in self.enableIf.items():
@ -334,12 +338,12 @@ KINDS: dict[Kind, Type[Manifest]] = {
# --- Dependency resolution -------------------------------------------------- # # --- Dependency resolution -------------------------------------------------- #
@dataclass @dt.dataclass
class Resolver: class Resolver:
_registry: "Registry" _registry: "Registry"
_target: Target _target: Target
_mappings: dict[str, list[Component]] = field(default_factory=dict) _mappings: dict[str, list[Component]] = dt.field(default_factory=dict)
_cache: dict[str, Resolved] = field(default_factory=dict) _cache: dict[str, Resolved] = dt.field(default_factory=dict)
_baked = False _baked = False
def _bake(self): def _bake(self):
@ -446,21 +450,25 @@ class Resolver:
_registry: Optional["Registry"] = None _registry: Optional["Registry"] = None
@dataclass @dt.dataclass
class Registry(DataClassJsonMixin): class Registry(DataClassJsonMixin):
project: Project project: Project
manifests: dict[str, Manifest] = field(default_factory=dict) manifests: dict[str, Manifest] = dt.field(default_factory=dict)
def _append(self, m: Manifest): def _append(self, m: Optional[Manifest]) -> Optional[Manifest]:
""" """
Append a manifest to the model Append a manifest to the model
""" """
if m is None:
return m
if m.id in self.manifests: if m.id in self.manifests:
raise RuntimeError( raise RuntimeError(
f"Duplicated manifest '{m.id}' at '{m.path}' already loaded from '{self.manifests[m.id].path}'" f"Duplicated manifest '{m.id}' at '{m.path}' already loaded from '{self.manifests[m.id].path}'"
) )
self.manifests[m.id] = m self.manifests[m.id] = m
return m
def iter(self, type: Type[utils.T]) -> Generator[utils.T, None, None]: def iter(self, type: Type[utils.T]) -> Generator[utils.T, None, None]:
""" """
@ -525,62 +533,55 @@ class Registry(DataClassJsonMixin):
@staticmethod @staticmethod
def load(project: Project, mixins: list[str], props: Props) -> "Registry": def load(project: Project, mixins: list[str], props: Props) -> "Registry":
registry = Registry(project) r = Registry(project)
registry._append(project) r._append(project)
# Lookup and load all extern projects # Lookup and load all extern projects
for externDir in project.externDirs: for externDir in project.externDirs:
projectPath = os.path.join(externDir, "project.json") extern = r._append(
manifestPath = os.path.join(externDir, "manifest.json") Manifest.tryLoad(Path(externDir) / "project")
or Manifest.tryLoad(Path(externDir) / "manifest")
)
if os.path.exists(projectPath): if extern is not None:
registry._append(Manifest.load(Path(projectPath)).ensureType(Project)) _logger.warn("Extern project does not have a project or manifest")
elif os.path.exists(manifestPath):
# For simple library allow to have a manifest.json instead of a project.json
registry._append(
Manifest.load(Path(manifestPath)).ensureType(Component)
)
else:
_logger.warn(
"Extern project does not have a project.json or manifest.json"
)
# Load all manifests from projects # Load all manifests from projects
for project in list(registry.iter(Project)): for project in list(r.iter(Project)):
targetDir = os.path.join(project.dirname(), const.TARGETS_DIR) targetDir = os.path.join(project.dirname(), const.TARGETS_DIR)
targetFiles = shell.find(targetDir, ["*.json"]) targetFiles = shell.find(targetDir, Manifest.SUFFIXES_GLOBS)
for targetFile in targetFiles: for targetFile in targetFiles:
registry._append(Manifest.load(Path(targetFile)).ensureType(Target)) r._append(Manifest.load(Path(targetFile)).ensureType(Target))
componentDir = os.path.join(project.dirname(), const.SRC_DIR) componentFiles = shell.find(
rootComponent = os.path.join(project.dirname(), "manifest.json") os.path.join(project.dirname(), const.SRC_DIR),
componentFiles = shell.find(componentDir, ["manifest.json"]) ["manifest" + s for s in Manifest.SUFFIXES],
if os.path.exists(rootComponent):
componentFiles += [rootComponent]
for componentFile in componentFiles:
registry._append(
Manifest.load(Path(componentFile)).ensureType(Component)
) )
rootComponent = Manifest.tryLoad(Path(project.dirname()) / "manifest")
if rootComponent is not None:
r._append(rootComponent)
for componentFile in componentFiles:
r._append(Manifest.load(Path(componentFile)).ensureType(Component))
# Resolve all dependencies for all targets # Resolve all dependencies for all targets
for target in registry.iter(Target): for target in r.iter(Target):
target.props |= props target.props |= props
resolver = Resolver(registry, target) resolver = Resolver(r, target)
# Apply injects # Apply injects
for c in registry.iter(Component): for c in r.iter(Component):
if c.isEnabled(target)[0]: if c.isEnabled(target)[0]:
for inject in c.injects: for inject in c.injects:
victim = registry.lookup(inject, Component) victim = r.lookup(inject, Component)
if not victim: if not victim:
raise RuntimeError(f"Cannot find component '{inject}'") raise RuntimeError(f"Cannot find component '{inject}'")
victim.requires += [c.id] victim.requires += [c.id]
# Resolve all components # Resolve all components
for c in registry.iter(Component): for c in r.iter(Component):
resolved = resolver.resolve(c.id) resolved = resolver.resolve(c.id)
if resolved.reason: if resolved.reason:
_logger.info(f"Component '{c.id}' disabled: {resolved.reason}") _logger.info(f"Component '{c.id}' disabled: {resolved.reason}")
@ -592,7 +593,7 @@ class Registry(DataClassJsonMixin):
# Merge in default tools # Merge in default tools
for k, v in DEFAULT_TOOLS.items(): for k, v in DEFAULT_TOOLS.items():
if k not in tools: if k not in tools:
tools[k] = dataclasses.replace(v) tools[k] = dt.replace(v)
from . import mixins as mxs from . import mixins as mxs
@ -601,12 +602,12 @@ class Registry(DataClassJsonMixin):
tools = mixin(target, tools) tools = mixin(target, tools)
# Apply tooling from components # Apply tooling from components
for c in registry.iter(Component): for c in r.iter(Component):
if c.resolved[target.id].enabled: if c.resolved[target.id].enabled:
for k, v in c.tools.items(): for k, v in c.tools.items():
tools[k].args += v.args tools[k].args += v.args
return registry return r
@cli.command("l", "list", "List all components and targets") @cli.command("l", "list", "List all components and targets")

View file

@ -23,19 +23,16 @@ def load(path: str):
def loadAll(): def loadAll():
_logger.info("Loading plugins...") _logger.info("Loading plugins...")
root = model.Project.root() project = model.Project.topmost()
if project is None:
if root is None:
_logger.info("Not in project, skipping plugin loading") _logger.info("Not in project, skipping plugin loading")
return return
project = model.Project.at(root)
paths = list( paths = list(
map(lambda e: os.path.join(const.EXTERN_DIR, e), project.extern.keys()) map(lambda e: os.path.join(const.EXTERN_DIR, e), project.extern.keys())
) + ["."] ) + ["."]
for dirname in paths: for dirname in paths:
pluginDir = os.path.join(root, dirname, const.META_DIR, "plugins") pluginDir = os.path.join(project.dirname(), dirname, const.META_DIR, "plugins")
for files in shell.readdir(pluginDir): for files in shell.readdir(pluginDir):
if files.endswith(".py"): if files.endswith(".py"):

View file

@ -6,9 +6,9 @@ import hashlib
T = TypeVar("T") T = TypeVar("T")
def uniq(l: list[T]) -> list[T]: def uniq(lst: list[T]) -> list[T]:
result: list[T] = [] result: list[T] = []
for i in l: for i in lst:
if i in result: if i in result:
result.remove(i) result.remove(i)
result.append(i) result.append(i)