Compare commits
19 commits
stable
...
rfc_pathli
Author | SHA1 | Date | |
---|---|---|---|
Jordan ⌨️ | e54f8f5964 | ||
Jordan ⌨️ | 8f59111ad7 | ||
Sleepy Monax | 01f0868db0 | ||
Sleepy Monax | f6f36ea79e | ||
Sleepy Monax | 3dbf269cdd | ||
Sleepy Monax | 68cae44750 | ||
Sleepy Monax | a472abb90f | ||
Sleepy Monax | 39ee66364d | ||
Sleepy Monax | 9dc1575a57 | ||
Sleepy Monax | 0a5ed0c444 | ||
Sleepy Monax | 2307a72564 | ||
Sleepy Monax | ce3728ccfb | ||
Sleepy Monax | c8b23bc6c1 | ||
Sleepy Monax | e77e787547 | ||
Jordan ⌨️ | 31ca0b19e8 | ||
Jordan ⌨️ | d842c6af2d | ||
Sleepy Monax | 920762f56d | ||
Sleepy Monax | 97f9f0ddba | ||
Sleepy Monax | 9572c6a3df |
16
.github/workflows/checks.yml
vendored
16
.github/workflows/checks.yml
vendored
|
@ -13,28 +13,22 @@ jobs:
|
|||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install -r requirements.txt
|
||||
python -m pip install mypy pytest ruff
|
||||
python -m pip install -r .github/workflows/requirements.txt
|
||||
|
||||
- name: Type Checking
|
||||
- name: Run MyPy
|
||||
run: |
|
||||
python -m mypy --install-types --non-interactive .
|
||||
|
||||
- name: Linting
|
||||
run: |
|
||||
ruff check cutekit
|
||||
|
||||
- name: Unit Testing
|
||||
- name: Run PyTest
|
||||
run: |
|
||||
python -m pytest
|
||||
|
||||
|
|
7
.github/workflows/publish.yml
vendored
7
.github/workflows/publish.yml
vendored
|
@ -20,14 +20,11 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
python-version: '3.x'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
|
|
5
.github/workflows/requirements.txt
vendored
Normal file
5
.github/workflows/requirements.txt
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
requests ~= 2.28.0
|
||||
graphviz ~= 0.20.1
|
||||
dataclasses-json ~= 0.6.2
|
||||
mypy ~= 1.7.0
|
||||
pytest ~= 7.4.3
|
16
README.md
16
README.md
|
@ -6,7 +6,7 @@
|
|||
</p>
|
||||
<h1 align="center">CuteKit</h1>
|
||||
<p align="center">
|
||||
The *magical* build system and package manager
|
||||
The Cute build system and package manager
|
||||
</p>
|
||||
<br/>
|
||||
<br/>
|
||||
|
@ -20,17 +20,17 @@
|
|||
- [Example](#example)
|
||||
|
||||
|
||||
## Introduction
|
||||
## Introduction
|
||||
|
||||
**CuteKit** is a simple - yet - powerful build system and package manager for C and C++. It:
|
||||
**CuteKit** is a simple - yet - powerful build system and package manager for C and C++. It:
|
||||
|
||||
- ✨ It uses **JSON**: Cutekit uses JSON instead of introducing a whole new programming language for describing the project. And also has macros to help the user experience (see [Jexpr](doc/spec/jexpr.md)).
|
||||
- ✨ It's a **package manager**: Cutekit package manager is based on **Git**, nothing is centralized.
|
||||
- ✨ It's **extendible**: Cutekit can be [extended](./doc/extends.md) by writing custom Python plugins.
|
||||
- ✨ It's **extendible**: Cutekit can be [extended](./doc/extends.md) by writing custom Python plugins.
|
||||
- ✨ It's **easy**: the [**templates**](./doc/templates.md) help the user quick-start a project.
|
||||
- ✨ It's **portable**: Cutekit can run on MacOS Gnu/Linux and Windows.
|
||||
|
||||
## Installation
|
||||
## Installation
|
||||
|
||||
To install Cutekit, you may use your favourite package manager if it is available. Or you can install it manually by following the instructions below.
|
||||
|
||||
|
@ -45,13 +45,13 @@ $ cd cutekit
|
|||
$ pip install --user -e .
|
||||
```
|
||||
|
||||
## Quick-start
|
||||
## Quick-start
|
||||
|
||||
-> If you directly want to start using Cutekit for a new project, you can just run `$ ck I host` and it will create a new project in the host directory (you can rename it later).
|
||||
-> If you directly want to start using Cutekit for a new project, you can just run `$ ck I host` and it will create a new project in the host directory (you can rename it later).
|
||||
|
||||
-> If you want to use Cutekit for writing operating systems, you can create a new [limine](https://github.com/limine-bootloader/limine/)-based project by running `$ ck I limine-barebone`.
|
||||
|
||||
## Example
|
||||
## Example
|
||||
|
||||
If you want to see how it works you can read the [doc/cutekit.md](doc/cutekit.md) file.
|
||||
|
||||
|
|
|
@ -3,12 +3,19 @@ import os
|
|||
import logging
|
||||
|
||||
from . import (
|
||||
builder, # noqa: F401 this is imported for side effects
|
||||
builder,
|
||||
cli,
|
||||
compat,
|
||||
const,
|
||||
graph, # noqa: F401 this is imported for side effects
|
||||
graph,
|
||||
jexpr,
|
||||
mixins,
|
||||
model,
|
||||
ninja,
|
||||
plugins,
|
||||
rules,
|
||||
shell,
|
||||
utils,
|
||||
vt100,
|
||||
)
|
||||
|
||||
|
@ -29,20 +36,19 @@ def ensure(version: tuple[int, int, int]):
|
|||
def setupLogger(verbose: bool):
|
||||
if verbose:
|
||||
logging.basicConfig(
|
||||
level=logging.DEBUG,
|
||||
level=logging.INFO,
|
||||
format=f"{vt100.CYAN}%(asctime)s{vt100.RESET} {vt100.YELLOW}%(levelname)s{vt100.RESET} %(name)s: %(message)s",
|
||||
datefmt="%Y-%m-%d %H:%M:%S",
|
||||
)
|
||||
else:
|
||||
projectRoot = model.Project.topmost()
|
||||
projectRoot = model.Project.root()
|
||||
logFile = const.GLOBAL_LOG_FILE
|
||||
if projectRoot is not None:
|
||||
logFile = os.path.join(projectRoot.dirname(), const.PROJECT_LOG_FILE)
|
||||
logfile = projectRoot / const.PROJECT_LOG_FILE
|
||||
|
||||
# create the directory if it doesn't exist
|
||||
logDir = os.path.dirname(logFile)
|
||||
if not os.path.isdir(logDir):
|
||||
os.makedirs(logDir)
|
||||
if not logFile.parent.is_dir():
|
||||
logFile.parent.mkdir(parents=True)
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import os
|
||||
import logging
|
||||
import dataclasses as dt
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TextIO, Union
|
||||
from dataclasses import dataclass
|
||||
from itertools import chain
|
||||
from typing import Generator, TextIO, Union, cast
|
||||
|
||||
from . import shell, rules, model, ninja, const, cli
|
||||
from . import shell, rules, model, ninja, const, utils, cli
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -46,26 +46,28 @@ def buildpath(target: model.Target, component: model.Component, path) -> Path:
|
|||
# --- Compilation ------------------------------------------------------------ #
|
||||
|
||||
|
||||
def wilcard(component: model.Component, wildcards: list[str]) -> list[str]:
|
||||
def listSrc(component: model.Component) -> list[str]:
|
||||
wildcards = set(chain(*map(lambda rule: rule.fileIn, rules.rules.values())))
|
||||
dirs = [component.dirname()] + list(
|
||||
map(lambda d: os.path.join(component.dirname(), d), component.subdirs)
|
||||
map(lambda d: component.parent / d, component.subdirs)
|
||||
)
|
||||
return shell.find(dirs, list(wildcards), recusive=False)
|
||||
|
||||
|
||||
def compile(
|
||||
w: ninja.Writer,
|
||||
target: model.Target,
|
||||
component: model.Component,
|
||||
rule: str,
|
||||
srcs: list[str],
|
||||
def compileSrc(
|
||||
w: ninja.Writer, target: model.Target, component: model.Component
|
||||
) -> list[str]:
|
||||
res: list[str] = []
|
||||
for src in srcs:
|
||||
for src in listSrc(component):
|
||||
rel = Path(src).relative_to(component.dirname())
|
||||
dest = buildpath(target, component, "obj") / rel.with_suffix(".o")
|
||||
t = target.tools[rule]
|
||||
w.build(str(dest), rule, inputs=src, order_only=t.files)
|
||||
|
||||
r = rules.byFileIn(src)
|
||||
if r is None:
|
||||
raise RuntimeError(f"Unknown rule for file {src}")
|
||||
|
||||
dest = buildpath(target, component, "obj") / rel.with_suffix(r.fileOut[0][1:])
|
||||
t = target.tools[r.id]
|
||||
w.build(str(dest), r.id, inputs=src, order_only=t.files)
|
||||
res.append(str(dest))
|
||||
return res
|
||||
|
||||
|
@ -125,16 +127,7 @@ def link(
|
|||
) -> str:
|
||||
w.newline()
|
||||
out = outfile(target, component)
|
||||
|
||||
objs = []
|
||||
objs += compile(w, target, component, "cc", wilcard(component, ["*.c"]))
|
||||
objs += compile(
|
||||
w, target, component, "cxx", wilcard(component, ["*.cpp", "*.cc", "*.cxx"])
|
||||
)
|
||||
objs += compile(
|
||||
w, target, component, "as", wilcard(component, ["*.s", "*.asm", "*.S"])
|
||||
)
|
||||
|
||||
objs: list[str] = compileSrc(w, target, component)
|
||||
res = compileRes(w, target, component)
|
||||
libs = collectLibs(registry, target, component)
|
||||
if component.type == model.Kind.LIB:
|
||||
|
@ -171,6 +164,7 @@ def gen(out: TextIO, target: model.Target, registry: model.Registry):
|
|||
w.variable("cdefs", " ".join(aggregateCdefs(target)))
|
||||
w.newline()
|
||||
|
||||
w.rule("cp", "cp $in $out")
|
||||
for i in target.tools:
|
||||
tool = target.tools[i]
|
||||
rule = rules.rules[i]
|
||||
|
@ -188,7 +182,7 @@ def gen(out: TextIO, target: model.Target, registry: model.Registry):
|
|||
all(w, registry, target)
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Product:
|
||||
path: Path
|
||||
target: model.Target
|
||||
|
@ -201,12 +195,11 @@ def build(
|
|||
components: Union[list[model.Component], model.Component, None] = None,
|
||||
) -> list[Product]:
|
||||
all = False
|
||||
shell.mkdir(target.builddir)
|
||||
ninjaPath = os.path.join(target.builddir, "build.ninja")
|
||||
target.builddir.mkdir(parents=True, exist_ok=True)
|
||||
ninjaPath = target.builddir / "build.ninja"
|
||||
|
||||
if not os.path.exists(ninjaPath):
|
||||
with open(ninjaPath, "w") as f:
|
||||
gen(f, target, registry)
|
||||
with ninjaPath.open("w") as f:
|
||||
gen(f, target, registry)
|
||||
|
||||
if components is None:
|
||||
all = True
|
||||
|
@ -217,10 +210,6 @@ def build(
|
|||
|
||||
products: list[Product] = []
|
||||
for c in components:
|
||||
r = c.resolved[target.id]
|
||||
if not r.enabled:
|
||||
raise RuntimeError(f"Component {c.id} is disabled: {r.reason}")
|
||||
|
||||
products.append(
|
||||
Product(
|
||||
path=Path(outfile(target, c)),
|
||||
|
@ -230,9 +219,10 @@ def build(
|
|||
)
|
||||
|
||||
outs = list(map(lambda p: str(p.path), products))
|
||||
|
||||
shell.exec("ninja", "-f", ninjaPath, *(outs if not all else []))
|
||||
|
||||
if all:
|
||||
shell.exec("ninja", "-v", "-f", ninjaPath)
|
||||
else:
|
||||
shell.exec("ninja", "-v", "-f", ninjaPath, *outs)
|
||||
return products
|
||||
|
||||
|
||||
|
@ -244,20 +234,23 @@ def buildCmd(args: cli.Args):
|
|||
registry = model.Registry.use(args)
|
||||
target = model.Target.use(args)
|
||||
componentSpec = args.consumeArg()
|
||||
component = None
|
||||
if componentSpec is not None:
|
||||
component = registry.lookup(componentSpec, model.Component)
|
||||
if componentSpec is None:
|
||||
raise RuntimeError("No component specified")
|
||||
component = registry.lookup(componentSpec, model.Component)
|
||||
build(target, registry, component)[0]
|
||||
|
||||
|
||||
@cli.command("r", "run", "Run a component")
|
||||
@cli.command("p", "project", "Show project information")
|
||||
def runCmd(args: cli.Args):
|
||||
registry = model.Registry.use(args)
|
||||
target = model.Target.use(args)
|
||||
debug = args.consumeOpt("debug", False) is True
|
||||
|
||||
componentSpec = args.consumeArg() or "__main__"
|
||||
component = registry.lookup(componentSpec, model.Component, includeProvides=True)
|
||||
componentSpec = args.consumeArg()
|
||||
if componentSpec is None:
|
||||
raise RuntimeError("No component specified")
|
||||
|
||||
component = registry.lookup(componentSpec, model.Component)
|
||||
if component is None:
|
||||
raise RuntimeError(f"Component {componentSpec} not found")
|
||||
|
||||
|
|
|
@ -1,17 +1,14 @@
|
|||
import inspect
|
||||
import logging
|
||||
import sys
|
||||
import dataclasses as dt
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional, Union, Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from . import const, vt100
|
||||
|
||||
Value = Union[str, bool, int]
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Args:
|
||||
opts: dict[str, Value]
|
||||
|
@ -72,7 +69,7 @@ def parse(args: list[str]) -> Args:
|
|||
Callback = Callable[[Args], None]
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Command:
|
||||
shortName: Optional[str]
|
||||
longName: str
|
||||
|
@ -89,7 +86,6 @@ def command(shortName: Optional[str], longName: str, helpText: str):
|
|||
calframe = inspect.getouterframes(curframe, 2)
|
||||
|
||||
def wrap(fn: Callable[[Args], None]):
|
||||
_logger.debug(f"Registering command {longName}")
|
||||
commands.append(
|
||||
Command(
|
||||
shortName,
|
||||
|
@ -127,10 +123,7 @@ def helpCmd(args: Args):
|
|||
|
||||
print()
|
||||
vt100.title("Commands")
|
||||
for cmd in sorted(commands, key=lambda c: c.longName):
|
||||
if cmd.longName.startswith("_"):
|
||||
continue
|
||||
|
||||
for cmd in sorted(commands, key=lambda c: c.shortName or c.longName):
|
||||
pluginText = ""
|
||||
if cmd.isPlugin:
|
||||
pluginText = f"{vt100.CYAN}(plugin){vt100.RESET}"
|
||||
|
|
|
@ -1,19 +1,21 @@
|
|||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
VERSION = (0, 6, 0, "dev")
|
||||
VERSION_STR = f"{VERSION[0]}.{VERSION[1]}.{VERSION[2]}{'-' + VERSION[3] if len(VERSION) >= 4 else ''}"
|
||||
MODULE_DIR = os.path.dirname(os.path.realpath(__file__))
|
||||
ARGV0 = os.path.basename(sys.argv[0])
|
||||
PROJECT_CK_DIR = ".cutekit"
|
||||
GLOBAL_CK_DIR = os.path.join(os.path.expanduser("~"), ".cutekit")
|
||||
BUILD_DIR = os.path.join(PROJECT_CK_DIR, "build")
|
||||
CACHE_DIR = os.path.join(PROJECT_CK_DIR, "cache")
|
||||
EXTERN_DIR = os.path.join(PROJECT_CK_DIR, "extern")
|
||||
SRC_DIR = "src"
|
||||
META_DIR = "meta"
|
||||
TARGETS_DIR = os.path.join(META_DIR, "targets")
|
||||
VERSION_STR = (
|
||||
f"{VERSION[0]}.{VERSION[1]}.{VERSION[2]}{'-' + VERSION[3] if VERSION[3] else ''}"
|
||||
)
|
||||
ARGV0 = Path(sys.argv[0])
|
||||
PROJECT_CK_DIR = Path(".cutekit")
|
||||
GLOBAL_CK_DIR = Path.home() / ".cutekit"
|
||||
BUILD_DIR = PROJECT_CK_DIR / "build"
|
||||
CACHE_DIR = PROJECT_CK_DIR / "cache"
|
||||
EXTERN_DIR = PROJECT_CK_DIR / "extern"
|
||||
SRC_DIR = Path("src")
|
||||
META_DIR = Path("meta")
|
||||
TARGETS_DIR = META_DIR / "targets"
|
||||
DEFAULT_REPO_TEMPLATES = "cute-engineering/cutekit-templates"
|
||||
DESCRIPTION = "A build system and package manager for low-level software development"
|
||||
PROJECT_LOG_FILE = os.path.join(PROJECT_CK_DIR, "cutekit.log")
|
||||
GLOBAL_LOG_FILE = os.path.join(os.path.expanduser("~"), ".cutekit", "cutekit.log")
|
||||
PROJECT_LOG_FILE = PROJECT_CK_DIR / "cutekit.log"
|
||||
GLOBAL_LOG_FILE = GLOBAL_CK_DIR / "cutekit.log"
|
||||
|
|
|
@ -80,7 +80,7 @@ def view(
|
|||
for req in component.provides:
|
||||
g.edge(req, component.id, arrowhead="none", color="#aaaaaa")
|
||||
|
||||
g.view(filename=os.path.join(target.builddir, "graph.gv"))
|
||||
g.view(filename=str(target.builddir / "graph.gv"))
|
||||
|
||||
|
||||
@cli.command("g", "graph", "Show the dependency graph")
|
||||
|
|
|
@ -1,20 +1,17 @@
|
|||
import os
|
||||
import json
|
||||
import re
|
||||
import tomllib
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from typing import Any, Optional, cast, Callable, Final
|
||||
from . import shell
|
||||
from typing import Any, cast, Callable, Final
|
||||
from . import shell, compat
|
||||
|
||||
Json = Any
|
||||
Builtin = Callable[..., Json]
|
||||
|
||||
BUILTINS: Final[dict[str, Builtin]] = {
|
||||
"uname": lambda arg, ctx: getattr(shell.uname(), arg).lower(),
|
||||
"include": lambda arg, ctx: evalRead(Path(arg)),
|
||||
"evalRead": lambda arg, ctx: evalRead(Path(arg)),
|
||||
"include": lambda arg, ctx: evalRead(arg),
|
||||
"evalRead": lambda arg, ctx: evalRead(arg),
|
||||
"join": lambda lhs, rhs, ctx: cast(
|
||||
Json, {**lhs, **rhs} if isinstance(lhs, dict) else lhs + rhs
|
||||
),
|
||||
|
@ -53,26 +50,12 @@ def eval(jexpr: Json, filePath: Path) -> Json:
|
|||
return jexpr
|
||||
|
||||
|
||||
def extraSchema(toml: str) -> Optional[str]:
|
||||
schemaRegex = re.compile(r"#:schema\s+(.*)")
|
||||
schema = schemaRegex.search(toml)
|
||||
return schema.group(1) if schema else None
|
||||
|
||||
|
||||
def read(path: Path) -> Json:
|
||||
try:
|
||||
with open(path, "r") as f:
|
||||
if path.suffix == ".toml":
|
||||
tomlStr = f.read()
|
||||
toml = tomllib.loads(tomlStr)
|
||||
schema = extraSchema(tomlStr)
|
||||
if schema:
|
||||
toml["$schema"] = schema
|
||||
return toml
|
||||
else:
|
||||
return json.load(f)
|
||||
except Exception as e:
|
||||
raise RuntimeError(f"Failed to read {path}: {e}")
|
||||
return json.load(f)
|
||||
except:
|
||||
raise RuntimeError(f"Failed to read {path}")
|
||||
|
||||
|
||||
def evalRead(path: Path) -> Json:
|
||||
|
|
|
@ -65,7 +65,6 @@ mixins: dict[str, Mixin] = {
|
|||
"tsan": makeMixinSan("thread"),
|
||||
"ubsan": makeMixinSan("undefined"),
|
||||
"tune": makeMixinTune("native"),
|
||||
"fast": makeMixinOptimize("fast"),
|
||||
"o3": makeMixinOptimize("3"),
|
||||
"o2": makeMixinOptimize("2"),
|
||||
"o1": makeMixinOptimize("1"),
|
||||
|
@ -78,6 +77,4 @@ def append(mixinSpec: str, mixin: Mixin):
|
|||
|
||||
|
||||
def byId(id: str) -> Mixin:
|
||||
if id not in mixins:
|
||||
raise RuntimeError(f"Unknown mixin {id}")
|
||||
return mixins[id]
|
||||
|
|
252
cutekit/model.py
252
cutekit/model.py
|
@ -1,12 +1,12 @@
|
|||
import os
|
||||
import logging
|
||||
import dataclasses as dt
|
||||
|
||||
|
||||
from enum import Enum
|
||||
from typing import Any, Generator, Optional, Type, cast
|
||||
from pathlib import Path
|
||||
from dataclasses_json import DataClassJsonMixin
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from cutekit import const, shell
|
||||
|
||||
|
@ -28,13 +28,11 @@ class Kind(Enum):
|
|||
# --- Manifest --------------------------------------------------------------- #
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Manifest(DataClassJsonMixin):
|
||||
id: str
|
||||
type: Kind = dt.field(default=Kind.UNKNOWN)
|
||||
path: str = dt.field(default="")
|
||||
SUFFIXES = [".json", ".toml"]
|
||||
SUFFIXES_GLOBS = ["*.json", "*.toml"]
|
||||
type: Kind = field(default=Kind.UNKNOWN)
|
||||
path: Path = field(default=Path())
|
||||
|
||||
@staticmethod
|
||||
def parse(path: Path, data: dict[str, Any]) -> "Manifest":
|
||||
|
@ -45,36 +43,24 @@ class Manifest(DataClassJsonMixin):
|
|||
kind = Kind(data["type"])
|
||||
del data["$schema"]
|
||||
obj = KINDS[kind].from_dict(data)
|
||||
obj.path = str(path)
|
||||
obj.path = path
|
||||
return obj
|
||||
|
||||
@staticmethod
|
||||
def tryLoad(path: Path) -> Optional["Manifest"]:
|
||||
for suffix in Manifest.SUFFIXES:
|
||||
pathWithSuffix = path.with_suffix(suffix)
|
||||
if pathWithSuffix.exists():
|
||||
_logger.debug(f"Loading manifest from '{pathWithSuffix}'")
|
||||
return Manifest.parse(pathWithSuffix, jexpr.evalRead(pathWithSuffix))
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def load(path: Path) -> "Manifest":
|
||||
"""
|
||||
Load a manifest from a given path
|
||||
"""
|
||||
manifest = Manifest.tryLoad(path)
|
||||
if manifest is None:
|
||||
raise RuntimeError(f"Could not find manifest at '{path}'")
|
||||
return manifest
|
||||
return Manifest.parse(path, jexpr.evalRead(path))
|
||||
|
||||
def dirname(self) -> str:
|
||||
def dirname(self) -> Path:
|
||||
"""
|
||||
Return the directory of the manifest
|
||||
"""
|
||||
return os.path.relpath(os.path.dirname(self.path), Path.cwd())
|
||||
return self.path.parent
|
||||
|
||||
def subpath(self, path) -> Path:
|
||||
return Path(self.dirname()) / path
|
||||
return self.dirname() / path
|
||||
|
||||
def ensureType(self, t: Type[utils.T]) -> utils.T:
|
||||
"""
|
||||
|
@ -92,60 +78,71 @@ class Manifest(DataClassJsonMixin):
|
|||
_project: Optional["Project"] = None
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Extern(DataClassJsonMixin):
|
||||
git: str
|
||||
tag: str
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Project(Manifest):
|
||||
description: str = dt.field(default="(No description)")
|
||||
extern: dict[str, Extern] = dt.field(default_factory=dict)
|
||||
description: str = field(default="(No description)")
|
||||
extern: dict[str, Extern] = field(default_factory=dict)
|
||||
|
||||
@property
|
||||
def externDirs(self) -> list[str]:
|
||||
res = map(lambda e: os.path.join(const.EXTERN_DIR, e), self.extern.keys())
|
||||
def externDirs(self) -> list[Path]:
|
||||
res = map(lambda e: const.EXTERN_DIR / e, self.extern.keys())
|
||||
return list(res)
|
||||
|
||||
@staticmethod
|
||||
def topmost() -> Optional["Project"]:
|
||||
def root() -> Optional[Path]:
|
||||
"""
|
||||
Find the root of the project by looking for a project.json
|
||||
"""
|
||||
cwd = Path.cwd()
|
||||
topmost: Optional["Project"] = None
|
||||
while str(cwd) != cwd.root:
|
||||
projectManifest = Manifest.tryLoad(cwd / "project")
|
||||
if projectManifest is not None:
|
||||
topmost = projectManifest.ensureType(Project)
|
||||
if (cwd / "project.json").is_file():
|
||||
return cwd
|
||||
cwd = cwd.parent
|
||||
return topmost
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def chdir() -> None:
|
||||
"""
|
||||
Change the current working directory to the root of the project
|
||||
"""
|
||||
path = Project.root()
|
||||
if path is None:
|
||||
raise RuntimeError(
|
||||
"No project.json found in this directory or any parent directory"
|
||||
)
|
||||
os.chdir(path)
|
||||
|
||||
@staticmethod
|
||||
def at(path: str | Path) -> Optional["Project"]:
|
||||
path = Path(path) / "project.json"
|
||||
if not path.exists():
|
||||
return None
|
||||
return Manifest.load(path).ensureType(Project)
|
||||
|
||||
@staticmethod
|
||||
def ensure() -> "Project":
|
||||
"""
|
||||
Ensure that a project exists in the current directory or any parent directory
|
||||
and chdir to the root of the project.
|
||||
"""
|
||||
project = Project.topmost()
|
||||
if project is None:
|
||||
root = Project.root()
|
||||
if root is None:
|
||||
raise RuntimeError(
|
||||
"No project found in this directory or any parent directory"
|
||||
"No project.json found in this directory or any parent directory"
|
||||
)
|
||||
os.chdir(project.dirname())
|
||||
return project
|
||||
os.chdir(root)
|
||||
return Manifest.load(Path(root / "project.json")).ensureType(
|
||||
Project
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def at(path: Path) -> Optional["Project"]:
|
||||
projectManifest = Manifest.tryLoad(path / "project")
|
||||
if projectManifest is None:
|
||||
return None
|
||||
return projectManifest.ensureType(Project)
|
||||
|
||||
@staticmethod
|
||||
def fetchs(extern: dict[str, Extern]):
|
||||
def fetchs(extern: dict[str | Path, Extern]):
|
||||
for extSpec, ext in extern.items():
|
||||
extPath = os.path.join(const.EXTERN_DIR, extSpec)
|
||||
extPath = const.EXTERN_DIR / extSpec
|
||||
|
||||
if os.path.exists(extPath):
|
||||
if extPath.exists():
|
||||
print(f"Skipping {extSpec}, already installed")
|
||||
continue
|
||||
|
||||
|
@ -161,7 +158,7 @@ class Project(Manifest):
|
|||
ext.git,
|
||||
extPath,
|
||||
)
|
||||
project = Project.at(Path(extPath))
|
||||
project = Project.at(extPath)
|
||||
if project is not None:
|
||||
Project.fetchs(project.extern)
|
||||
|
||||
|
@ -187,10 +184,9 @@ def initCmd(args: cli.Args):
|
|||
list = args.consumeOpt("list")
|
||||
|
||||
template = args.consumeArg()
|
||||
name = args.consumeArg()
|
||||
name = Path(args.consumeArg())
|
||||
|
||||
_logger.info("Fetching registry...")
|
||||
|
||||
r = requests.get(f"https://raw.githubusercontent.com/{repo}/main/registry.json")
|
||||
|
||||
if r.status_code != 200:
|
||||
|
@ -218,7 +214,7 @@ def initCmd(args: cli.Args):
|
|||
_logger.info(f"No name was provided, defaulting to {template}")
|
||||
name = template
|
||||
|
||||
if os.path.exists(name):
|
||||
if name.exists():
|
||||
raise RuntimeError(f"Directory {name} already exists")
|
||||
|
||||
print(f"Creating project {name} from template {template}...")
|
||||
|
@ -238,33 +234,29 @@ def initCmd(args: cli.Args):
|
|||
# --- Target ----------------------------------------------------------------- #
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Tool(DataClassJsonMixin):
|
||||
cmd: str = dt.field(default="")
|
||||
args: list[str] = dt.field(default_factory=list)
|
||||
files: list[str] = dt.field(default_factory=list)
|
||||
cmd: str = field(default="")
|
||||
args: list[str] = field(default_factory=list)
|
||||
files: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
Tools = dict[str, Tool]
|
||||
|
||||
DEFAULT_TOOLS: Tools = {
|
||||
"cp": Tool("cp"),
|
||||
}
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Target(Manifest):
|
||||
props: Props = dt.field(default_factory=dict)
|
||||
tools: Tools = dt.field(default_factory=dict)
|
||||
routing: dict[str, str] = dt.field(default_factory=dict)
|
||||
props: Props = field(default_factory=dict)
|
||||
tools: Tools = field(default_factory=dict)
|
||||
routing: dict[str, str] = field(default_factory=dict)
|
||||
|
||||
@property
|
||||
def hashid(self) -> str:
|
||||
return utils.hash((self.props, [v.to_dict() for k, v in self.tools.items()]))
|
||||
|
||||
@property
|
||||
def builddir(self) -> str:
|
||||
return os.path.join(const.BUILD_DIR, f"{self.id}-{self.hashid[:8]}")
|
||||
def builddir(self) -> Path:
|
||||
return const.BUILD_DIR / f"{self.id}-{self.hashid[:8]}"
|
||||
|
||||
@staticmethod
|
||||
def use(args: cli.Args) -> "Target":
|
||||
|
@ -286,27 +278,27 @@ class Target(Manifest):
|
|||
# --- Component -------------------------------------------------------------- #
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Resolved:
|
||||
reason: Optional[str] = None
|
||||
resolved: list[str] = dt.field(default_factory=list)
|
||||
resolved: list[str] = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def enabled(self) -> bool:
|
||||
return self.reason is None
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Component(Manifest):
|
||||
decription: str = dt.field(default="(No description)")
|
||||
props: Props = dt.field(default_factory=dict)
|
||||
tools: Tools = dt.field(default_factory=dict)
|
||||
enableIf: dict[str, list[Any]] = dt.field(default_factory=dict)
|
||||
requires: list[str] = dt.field(default_factory=list)
|
||||
provides: list[str] = dt.field(default_factory=list)
|
||||
subdirs: list[str] = dt.field(default_factory=list)
|
||||
injects: list[str] = dt.field(default_factory=list)
|
||||
resolved: dict[str, Resolved] = dt.field(default_factory=dict)
|
||||
decription: str = field(default="(No description)")
|
||||
props: Props = field(default_factory=dict)
|
||||
tools: Tools = field(default_factory=dict)
|
||||
enableIf: dict[str, list[Any]] = field(default_factory=dict)
|
||||
requires: list[str] = field(default_factory=list)
|
||||
provides: list[str] = field(default_factory=list)
|
||||
subdirs: list[str] = field(default_factory=list)
|
||||
injects: list[str] = field(default_factory=list)
|
||||
resolved: dict[str, Resolved] = field(default_factory=dict)
|
||||
|
||||
def isEnabled(self, target: Target) -> tuple[bool, str]:
|
||||
for k, v in self.enableIf.items():
|
||||
|
@ -337,12 +329,12 @@ KINDS: dict[Kind, Type[Manifest]] = {
|
|||
# --- Dependency resolution -------------------------------------------------- #
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Resolver:
|
||||
_registry: "Registry"
|
||||
_target: Target
|
||||
_mappings: dict[str, list[Component]] = dt.field(default_factory=dict)
|
||||
_cache: dict[str, Resolved] = dt.field(default_factory=dict)
|
||||
_mappings: dict[str, list[Component]] = field(default_factory=dict)
|
||||
_cache: dict[str, Resolved] = field(default_factory=dict)
|
||||
_baked = False
|
||||
|
||||
def _bake(self):
|
||||
|
@ -449,25 +441,21 @@ class Resolver:
|
|||
_registry: Optional["Registry"] = None
|
||||
|
||||
|
||||
@dt.dataclass
|
||||
@dataclass
|
||||
class Registry(DataClassJsonMixin):
|
||||
project: Project
|
||||
manifests: dict[str, Manifest] = dt.field(default_factory=dict)
|
||||
manifests: dict[str, Manifest] = field(default_factory=dict)
|
||||
|
||||
def _append(self, m: Optional[Manifest]) -> Optional[Manifest]:
|
||||
def _append(self, m: Manifest):
|
||||
"""
|
||||
Append a manifest to the model
|
||||
"""
|
||||
if m is None:
|
||||
return m
|
||||
|
||||
if m.id in self.manifests:
|
||||
raise RuntimeError(
|
||||
f"Duplicated manifest '{m.id}' at '{m.path}' already loaded from '{self.manifests[m.id].path}'"
|
||||
)
|
||||
|
||||
self.manifests[m.id] = m
|
||||
return m
|
||||
|
||||
def iter(self, type: Type[utils.T]) -> Generator[utils.T, None, None]:
|
||||
"""
|
||||
|
@ -484,9 +472,7 @@ class Registry(DataClassJsonMixin):
|
|||
if resolve.enabled:
|
||||
yield c
|
||||
|
||||
def lookup(
|
||||
self, name: str, type: Type[utils.T], includeProvides: bool = False
|
||||
) -> Optional[utils.T]:
|
||||
def lookup(self, name: str, type: Type[utils.T]) -> Optional[utils.T]:
|
||||
"""
|
||||
Lookup a manifest of a given type by name
|
||||
"""
|
||||
|
@ -496,11 +482,6 @@ class Registry(DataClassJsonMixin):
|
|||
if isinstance(m, type):
|
||||
return m
|
||||
|
||||
if includeProvides and type is Component:
|
||||
for m in self.iter(Component):
|
||||
if name in m.provides:
|
||||
return m # type: ignore
|
||||
|
||||
return None
|
||||
|
||||
def ensure(self, name: str, type: Type[utils.T]) -> utils.T:
|
||||
|
@ -532,55 +513,62 @@ class Registry(DataClassJsonMixin):
|
|||
|
||||
@staticmethod
|
||||
def load(project: Project, mixins: list[str], props: Props) -> "Registry":
|
||||
r = Registry(project)
|
||||
r._append(project)
|
||||
registry = Registry(project)
|
||||
registry._append(project)
|
||||
|
||||
# Lookup and load all extern projects
|
||||
for externDir in project.externDirs:
|
||||
extern = r._append(
|
||||
Manifest.tryLoad(Path(externDir) / "project")
|
||||
or Manifest.tryLoad(Path(externDir) / "manifest")
|
||||
)
|
||||
projectPath = externDir / "project.json"
|
||||
manifestPath = externDir / "manifest.json"
|
||||
|
||||
if extern is not None:
|
||||
_logger.warn("Extern project does not have a project or manifest")
|
||||
if projectPath.exists():
|
||||
registry._append(Manifest.load(projectPath).ensureType(Project))
|
||||
elif manifestPath.exists():
|
||||
# For simple library allow to have a manifest.json instead of a project.json
|
||||
registry._append(
|
||||
Manifest.load(manifestPath).ensureType(Component)
|
||||
)
|
||||
else:
|
||||
_logger.warn(
|
||||
"Extern project does not have a project.json or manifest.json"
|
||||
)
|
||||
|
||||
# Load all manifests from projects
|
||||
for project in list(r.iter(Project)):
|
||||
targetDir = os.path.join(project.dirname(), const.TARGETS_DIR)
|
||||
targetFiles = shell.find(targetDir, Manifest.SUFFIXES_GLOBS)
|
||||
for project in list(registry.iter(Project)):
|
||||
targetDir = project.parent / const.TARGETS_DIR
|
||||
targetFiles = targetDir.glob("*.json")
|
||||
|
||||
for targetFile in targetFiles:
|
||||
r._append(Manifest.load(Path(targetFile)).ensureType(Target))
|
||||
registry._append(Manifest.load(Path(targetFile)).ensureType(Target))
|
||||
|
||||
componentFiles = shell.find(
|
||||
os.path.join(project.dirname(), const.SRC_DIR),
|
||||
["manifest" + s for s in Manifest.SUFFIXES],
|
||||
)
|
||||
componentDir = project.parent / const.COMPONENTS_DIR
|
||||
rootComponent = project.parent / "manifest.json"
|
||||
componentFiles = list(componentDir.glob("manifest.json"))
|
||||
|
||||
rootComponent = Manifest.tryLoad(Path(project.dirname()) / "manifest")
|
||||
if rootComponent is not None:
|
||||
r._append(rootComponent)
|
||||
if rootComponent.exists():
|
||||
componentFiles += [rootComponent]
|
||||
|
||||
for componentFile in componentFiles:
|
||||
r._append(Manifest.load(Path(componentFile)).ensureType(Component))
|
||||
registry._append(
|
||||
Manifest.load(componentFile).ensureType(Component)
|
||||
)
|
||||
|
||||
# Resolve all dependencies for all targets
|
||||
for target in r.iter(Target):
|
||||
for target in registry.iter(Target):
|
||||
target.props |= props
|
||||
resolver = Resolver(r, target)
|
||||
resolver = Resolver(registry, target)
|
||||
|
||||
# Apply injects
|
||||
for c in r.iter(Component):
|
||||
for c in registry.iter(Component):
|
||||
if c.isEnabled(target)[0]:
|
||||
for inject in c.injects:
|
||||
victim = r.lookup(inject, Component)
|
||||
victim = registry.lookup(inject, Component)
|
||||
if not victim:
|
||||
raise RuntimeError(f"Cannot find component '{inject}'")
|
||||
victim.requires += [c.id]
|
||||
|
||||
# Resolve all components
|
||||
for c in r.iter(Component):
|
||||
for c in registry.iter(Component):
|
||||
resolved = resolver.resolve(c.id)
|
||||
if resolved.reason:
|
||||
_logger.info(f"Component '{c.id}' disabled: {resolved.reason}")
|
||||
|
@ -588,12 +576,6 @@ class Registry(DataClassJsonMixin):
|
|||
|
||||
# Resolve tooling
|
||||
tools: Tools = target.tools
|
||||
|
||||
# Merge in default tools
|
||||
for k, v in DEFAULT_TOOLS.items():
|
||||
if k not in tools:
|
||||
tools[k] = dt.replace(v)
|
||||
|
||||
from . import mixins as mxs
|
||||
|
||||
for mix in mixins:
|
||||
|
@ -601,12 +583,12 @@ class Registry(DataClassJsonMixin):
|
|||
tools = mixin(target, tools)
|
||||
|
||||
# Apply tooling from components
|
||||
for c in r.iter(Component):
|
||||
for c in registry.iter(Component):
|
||||
if c.resolved[target.id].enabled:
|
||||
for k, v in c.tools.items():
|
||||
tools[k].args += v.args
|
||||
|
||||
return r
|
||||
return registry
|
||||
|
||||
|
||||
@cli.command("l", "list", "List all components and targets")
|
||||
|
|
|
@ -23,21 +23,21 @@ def load(path: str):
|
|||
def loadAll():
    """Discover and initialize every plugin visible from the current project.

    Scans ``<root>/<extern>/<dep>/<meta>/plugins`` for each extern dependency,
    plus ``<root>/./<meta>/plugins`` for the project itself, loads every
    ``*.py`` file found there, and calls the loaded plugin's ``init()`` hook.
    Does nothing (beyond a log line) when not run inside a project.

    NOTE(review): this span was a diff artifact containing both the old
    os.path-based and the new pathlib-based variant; resolved here to the
    pathlib version.
    """
    _logger.info("Loading plugins...")

    root = model.Project.root()

    if root is None:
        _logger.info("Not in project, skipping plugin loading")
        return

    project = model.Project.at(root)
    # Search every extern dependency, then the project itself (".").
    paths = list(map(lambda e: const.EXTERN_DIR / e, project.extern.keys())) + ["."]

    for dirname in paths:
        pluginDir = root / dirname / const.META_DIR / "plugins"

        # Path.glob yields nothing for a missing directory, so no
        # existence check is needed here.
        for script in pluginDir.glob("*.py"):
            plugin = load(script)

            if plugin:
                _logger.info(f"Loaded plugin {plugin.name}")
                plugin.init()
|
||||
|
|
|
@ -27,7 +27,6 @@ class Rule:
|
|||
|
||||
|
||||
rules: dict[str, Rule] = {
|
||||
"cp": Rule("cp", ["*"], ["*"], "$in $out"),
|
||||
"cc": Rule(
|
||||
"cc",
|
||||
["*.c"],
|
||||
|
|
|
@ -12,6 +12,7 @@ import logging
|
|||
import tempfile
|
||||
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from . import const
|
||||
|
||||
|
@ -49,81 +50,30 @@ def sha256sum(path: str) -> str:
|
|||
return hashlib.sha256(f.read()).hexdigest()
|
||||
|
||||
|
||||
def find(
|
||||
path: str | list[str], wildcards: list[str] = [], recusive: bool = True
|
||||
) -> list[str]:
|
||||
_logger.info(f"Looking for files in {path} matching {wildcards}")
|
||||
|
||||
result: list[str] = []
|
||||
|
||||
if isinstance(path, list):
|
||||
for p in path:
|
||||
result += find(p, wildcards, recusive)
|
||||
return result
|
||||
|
||||
if not os.path.isdir(path):
|
||||
return []
|
||||
|
||||
if recusive:
|
||||
for root, _, files in os.walk(path):
|
||||
for f in files:
|
||||
if len(wildcards) == 0:
|
||||
result.append(os.path.join(root, f))
|
||||
else:
|
||||
for wildcard in wildcards:
|
||||
if fnmatch.fnmatch(f, wildcard):
|
||||
result.append(os.path.join(root, f))
|
||||
break
|
||||
else:
|
||||
for f in os.listdir(path):
|
||||
if len(wildcards) == 0:
|
||||
result.append(os.path.join(path, f))
|
||||
else:
|
||||
for wildcard in wildcards:
|
||||
if fnmatch.fnmatch(f, wildcard):
|
||||
result.append(os.path.join(path, f))
|
||||
break
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def mkdir(path: str) -> str:
    """Create *path* (and any missing parents) and return it.

    An already-existing directory is not an error; an existing
    non-directory at *path* still raises FileExistsError, matching the
    previous errno.EEXIST/isdir check.
    """
    _logger.info(f"Creating directory {path}")

    # exist_ok=True replaces the manual errno.EEXIST + isdir dance.
    os.makedirs(path, exist_ok=True)
    return path
|
||||
|
||||
|
||||
def rmrf(path: Path) -> bool:
    """Recursively delete *path*; return False when it does not exist.

    NOTE(review): this span was a diff artifact holding both the old str-based
    and new Path-based variant; resolved here to the Path version.
    """
    _logger.info(f"Removing directory {path}")

    if not path.exists():
        return False
    # ignore_errors keeps best-effort semantics for stubborn entries.
    shutil.rmtree(path, ignore_errors=True)
    return True
|
||||
|
||||
|
||||
def wget(url: str, path: Optional[str] = None) -> str:
|
||||
def wget(url: str, path: Optional[Path] = None) -> Path:
|
||||
import requests
|
||||
|
||||
if path is None:
|
||||
path = os.path.join(
|
||||
const.CACHE_DIR, hashlib.sha256(url.encode("utf-8")).hexdigest()
|
||||
)
|
||||
path = const.CACHE_DIR / hashlib.sha256(url.encode("utf-8")).hexdigest()
|
||||
|
||||
if os.path.exists(path):
|
||||
if path.exists():
|
||||
return path
|
||||
|
||||
_logger.info(f"Downloading {url} to {path}")
|
||||
|
||||
r = requests.get(url, stream=True)
|
||||
r.raise_for_status()
|
||||
mkdir(os.path.dirname(path))
|
||||
with open(path, "wb") as f:
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with path.open("wb") as f:
|
||||
for chunk in r.iter_content(chunk_size=8192):
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
|
@ -179,36 +129,28 @@ def popen(*args: str) -> str:
|
|||
return proc.stdout.decode("utf-8")
|
||||
|
||||
|
||||
def readdir(path: str) -> list[str]:
    """List the entries of *path*; a missing directory yields an empty list."""
    _logger.info(f"Reading directory {path}")

    try:
        entries = os.listdir(path)
    except FileNotFoundError:
        # Absent directory is treated as simply empty.
        entries = []
    return entries
|
||||
|
||||
|
||||
def cp(src: Path, dst: Path):
    """Copy the file *src* to *dst* (shutil.copy semantics).

    NOTE(review): diff artifact resolved to the Path-based variant.
    """
    _logger.info(f"Copying {src} to {dst}")

    shutil.copy(src, dst)
|
||||
|
||||
|
||||
def mv(src: Path, dst: Path):
    """Move *src* to *dst* (shutil.move semantics).

    NOTE(review): diff artifact resolved to the Path-based variant.
    """
    _logger.info(f"Moving {src} to {dst}")

    shutil.move(src, dst)
|
||||
|
||||
|
||||
def cpTree(src: Path, dst: Path):
    """Recursively copy directory *src* into *dst*, merging into an
    existing destination (dirs_exist_ok=True).

    NOTE(review): diff artifact resolved to the Path-based variant.
    """
    _logger.info(f"Copying {src} to {dst}")

    shutil.copytree(src, dst, dirs_exist_ok=True)
|
||||
|
||||
|
||||
def cloneDir(url: str, path: str, dest: str) -> str:
|
||||
def cloneDir(url: str, path: Path, dest: Path) -> Path:
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
mkdir(tmp)
|
||||
tmp = Path(tmp)
|
||||
tmp.mkdir(parents=True, exist_ok=True)
|
||||
exec(
|
||||
*["git", "clone", "-n", "--depth=1", "--filter=tree:0", url, tmp, "-q"],
|
||||
quiet=True,
|
||||
|
@ -218,7 +160,7 @@ def cloneDir(url: str, path: str, dest: str) -> str:
|
|||
quiet=True,
|
||||
)
|
||||
exec(*["git", "-C", tmp, "checkout", "-q", "--no-progress"], quiet=True)
|
||||
mv(os.path.join(tmp, path), dest)
|
||||
mv(tmp / path, dest)
|
||||
|
||||
return dest
|
||||
|
||||
|
|
|
@ -6,9 +6,9 @@ import hashlib
|
|||
T = TypeVar("T")
|
||||
|
||||
|
||||
def uniq(lst: list[T]) -> list[T]:
|
||||
def uniq(l: list[T]) -> list[T]:
|
||||
result: list[T] = []
|
||||
for i in lst:
|
||||
for i in l:
|
||||
if i in result:
|
||||
result.remove(i)
|
||||
result.append(i)
|
||||
|
|
40
doc/mit.svg
40
doc/mit.svg
|
@ -1,40 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="93.217mm" height="69.497mm" version="1.1" viewBox="0 0 93.217 69.497" xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
|
||||
<metadata>
|
||||
<rdf:RDF>
|
||||
<cc:Work rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title />
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g transform="translate(0 19.497)">
|
||||
<rect y="-19.497" width="93.217" height="69.497" fill="#a00" />
|
||||
<rect x="5.6831" y="-14.441" width="8.8" height="40" ry="0" fill="#fff" />
|
||||
<rect x="5.6332" y="21.559" width="4.45" height="4.05" fill="#a00" />
|
||||
<rect x="10.083" y="17.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="5.6332" y="13.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="10.083" y="9.5594" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="5.6834" y="-14.441" width="35.2" height="8.8" fill="#fff" />
|
||||
<rect x="32.083" y="-14.441" width="8.8" height="40" ry="0" fill="#fff" />
|
||||
<rect x="36.483" y="21.559" width="4.45" height="4.05" fill="#a00" />
|
||||
<rect x="32.033" y="17.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="36.483" y="13.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="32.033" y="9.5594" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="20.083" y="-14.441" width="6.6202" height="40" ry="0" fill="#fff" />
|
||||
<rect x="46.083" y="-14.441" width="8.8" height="40" ry="0" fill="#fff" />
|
||||
<rect x="70.083" y="-14.441" width="8.8" height="40" ry="0" fill="#fff" />
|
||||
<rect x="60.283" y="-14.441" width="28.4" height="8.8" fill="#fff" />
|
||||
<rect x="70.033" y="21.559" width="4.45" height="4.05" fill="#a00" />
|
||||
<rect x="74.483" y="17.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="70.033" y="13.559" width="4.45" height="4" fill="#a00" />
|
||||
<rect x="74.483" y="9.5594" width="4.45" height="4" fill="#a00" />
|
||||
<text x="12.171427" y="44.462078" fill="#ffffff" font-family="'Liberation Sans'" font-size="11.289px"
|
||||
letter-spacing="0px" stroke-width="1px" word-spacing="0px" style="line-height:125%" xml:space="preserve">
|
||||
<tspan x="12.171427" y="44.462078" font-family="'Liberation Sans'" font-size="19.756px">License</tspan>
|
||||
</text>
|
||||
</g>
|
||||
</svg>
|
Before Width: | Height: | Size: 2.5 KiB |
|
@ -12,9 +12,14 @@ authors = [
|
|||
{ name = "Cute Engineering", email = "contact@cute.engineering" },
|
||||
]
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
requires-python = ">=3.10"
|
||||
license = { text = "MIT" }
|
||||
dynamic = ["version", "dependencies"]
|
||||
dependencies = [
|
||||
"requests ~= 2.28.0",
|
||||
"graphviz ~= 0.20.1",
|
||||
"dataclasses-json ~= 0.6.2",
|
||||
]
|
||||
dynamic = ["version"]
|
||||
|
||||
[project.scripts]
|
||||
ck = "cutekit:main"
|
||||
|
@ -26,7 +31,6 @@ packages = ["cutekit"]
|
|||
|
||||
[tool.setuptools.dynamic]
|
||||
version = { attr = "cutekit.const.VERSION" }
|
||||
dependencies = { file = ["requirements.txt"] }
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
"cutekit" = ["py.typed"]
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
requests ~= 2.31.0
|
||||
graphviz ~= 0.20.1
|
||||
dataclasses-json ~= 0.6.2
|
Loading…
Reference in a new issue