ref: move to pathlib
parent 8f59111ad7
commit e54f8f5964
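In short, the os.path helpers, os.makedirs, and bare open() calls are swapped for their pathlib.Path equivalents. A minimal sketch of the mappings this diff relies on (illustrative only, not part of the commit; paths are hypothetical and POSIX-style):

import os
from pathlib import Path

root = "/tmp/ck-demo"  # hypothetical path, POSIX separators assumed

# join: os.path.join(a, b)                ->  Path(a) / b
assert str(Path(root) / "build") == os.path.join(root, "build")

# home dir: os.path.expanduser("~")       ->  Path.home()
assert Path.home() == Path(os.path.expanduser("~"))

# dirname: os.path.dirname(p)             ->  Path(p).parent
assert str(Path(root, "cutekit.log").parent) == os.path.dirname(os.path.join(root, "cutekit.log"))

# checks: os.path.exists / os.path.isdir  ->  Path.exists() / Path.is_dir()
assert Path(root).exists() == os.path.exists(root)

# mkdir -p: os.makedirs(p)                ->  Path(p).mkdir(parents=True, exist_ok=True)
(Path(root) / "build").mkdir(parents=True, exist_ok=True)

# listing by pattern: fnmatch over os.listdir  ->  Path.glob("*.py")
scripts = list(Path(root).glob("*.py"))

# open(p, "w")                            ->  Path(p).open("w")
with (Path(root) / "build" / "build.ninja").open("w") as f:
    f.write("# generated\n")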
@@ -44,12 +44,11 @@ def setupLogger(verbose: bool):
     projectRoot = model.Project.root()
     logFile = const.GLOBAL_LOG_FILE
     if projectRoot is not None:
-        logFile = os.path.join(projectRoot, const.PROJECT_LOG_FILE)
+        logFile = projectRoot / const.PROJECT_LOG_FILE

     # create the directory if it doesn't exist
-    logDir = os.path.dirname(logFile)
-    if not os.path.isdir(logDir):
-        os.makedirs(logDir)
+    if not logFile.parent.is_dir():
+        logFile.parent.mkdir(parents=True)

     logging.basicConfig(
         level=logging.INFO,
@@ -49,7 +49,7 @@ def buildpath(target: model.Target, component: model.Component, path) -> Path:
 def listSrc(component: model.Component) -> list[str]:
     wildcards = set(chain(*map(lambda rule: rule.fileIn, rules.rules.values())))
     dirs = [component.dirname()] + list(
-        map(lambda d: os.path.join(component.dirname(), d), component.subdirs)
+        map(lambda d: component.parent / d, component.subdirs)
     )
     return shell.find(dirs, list(wildcards), recusive=False)
@@ -195,9 +195,10 @@ def build(
     components: Union[list[model.Component], model.Component, None] = None,
 ) -> list[Product]:
     all = False
-    shell.mkdir(target.builddir)
-    ninjaPath = os.path.join(target.builddir, "build.ninja")
-    with open(ninjaPath, "w") as f:
+    target.builddir.mkdir(parents=True, exist_ok=True)
+    ninjaPath = target.builddir / "build.ninja"
+
+    with ninjaPath.open("w") as f:
         gen(f, target, registry)

     if components is None:
@@ -6,16 +6,16 @@ VERSION = (0, 6, 0, "dev")
 VERSION_STR = (
     f"{VERSION[0]}.{VERSION[1]}.{VERSION[2]}{'-' + VERSION[3] if VERSION[3] else ''}"
 )
-ARGV0 = os.path.basename(sys.argv[0])
-PROJECT_CK_DIR = ".cutekit"
-GLOBAL_CK_DIR = os.path.join(os.path.expanduser("~"), ".cutekit")
-BUILD_DIR = os.path.join(PROJECT_CK_DIR, "build")
-CACHE_DIR = os.path.join(PROJECT_CK_DIR, "cache")
-EXTERN_DIR = os.path.join(PROJECT_CK_DIR, "extern")
-SRC_DIR = "src"
-META_DIR = "meta"
-TARGETS_DIR = os.path.join(META_DIR, "targets")
+ARGV0 = Path(sys.argv[0])
+PROJECT_CK_DIR = Path(".cutekit")
+GLOBAL_CK_DIR = Path.home() / ".cutekit"
+BUILD_DIR = PROJECT_CK_DIR / "build"
+CACHE_DIR = PROJECT_CK_DIR / "cache"
+EXTERN_DIR = PROJECT_CK_DIR / "extern"
+SRC_DIR = Path("src")
+META_DIR = Path("meta")
+TARGETS_DIR = META_DIR / "targets"
 DEFAULT_REPO_TEMPLATES = "cute-engineering/cutekit-templates"
 DESCRIPTION = "A build system and package manager for low-level software development"
-PROJECT_LOG_FILE = os.path.join(PROJECT_CK_DIR, "cutekit.log")
-GLOBAL_LOG_FILE = os.path.join(os.path.expanduser("~"), ".cutekit", "cutekit.log")
+PROJECT_LOG_FILE = PROJECT_CK_DIR / "cutekit.log"
+GLOBAL_LOG_FILE = GLOBAL_CK_DIR / "cutekit.log"
@@ -80,7 +80,7 @@ def view(
     for req in component.provides:
         g.edge(req, component.id, arrowhead="none", color="#aaaaaa")

-    g.view(filename=os.path.join(target.builddir, "graph.gv"))
+    g.view(filename=str(target.builddir / "graph.gv"))


 @cli.command("g", "graph", "Show the dependency graph")
@@ -32,7 +32,7 @@ class Kind(Enum):
 class Manifest(DataClassJsonMixin):
     id: str
     type: Kind = field(default=Kind.UNKNOWN)
-    path: str = field(default="")
+    path: Path = field(default=Path())

     @staticmethod
     def parse(path: Path, data: dict[str, Any]) -> "Manifest":
@@ -43,7 +43,7 @@ class Manifest(DataClassJsonMixin):
         kind = Kind(data["type"])
         del data["$schema"]
         obj = KINDS[kind].from_dict(data)
-        obj.path = str(path)
+        obj.path = path
         return obj

     @staticmethod
@@ -53,14 +53,14 @@ class Manifest(DataClassJsonMixin):
         """
         return Manifest.parse(path, jexpr.evalRead(path))

-    def dirname(self) -> str:
+    def dirname(self) -> Path:
         """
         Return the directory of the manifest
         """
-        return os.path.dirname(self.path)
+        return self.path.parent

     def subpath(self, path) -> Path:
-        return Path(self.dirname()) / path
+        return self.dirname() / path

     def ensureType(self, t: Type[utils.T]) -> utils.T:
         """
@@ -90,19 +90,19 @@ class Project(Manifest):
     extern: dict[str, Extern] = field(default_factory=dict)

     @property
-    def externDirs(self) -> list[str]:
-        res = map(lambda e: os.path.join(const.EXTERN_DIR, e), self.extern.keys())
+    def externDirs(self) -> list[Path]:
+        res = map(lambda e: const.EXTERN_DIR / e, self.extern.keys())
         return list(res)

     @staticmethod
-    def root() -> Optional[str]:
+    def root() -> Optional[Path]:
         """
         Find the root of the project by looking for a project.json
         """
         cwd = Path.cwd()
         while str(cwd) != cwd.root:
             if (cwd / "project.json").is_file():
-                return str(cwd)
+                return cwd
             cwd = cwd.parent
         return None
@@ -119,11 +119,11 @@ class Project(Manifest):
         os.chdir(path)

     @staticmethod
-    def at(path: str) -> Optional["Project"]:
-        path = os.path.join(path, "project.json")
-        if not os.path.exists(path):
+    def at(path: str | Path) -> Optional["Project"]:
+        path = Path(path) / "project.json"
+        if not path.exists():
             return None
-        return Manifest.load(Path(path)).ensureType(Project)
+        return Manifest.load(path).ensureType(Project)

     @staticmethod
     def ensure() -> "Project":
@@ -133,16 +133,16 @@ class Project(Manifest):
                 "No project.json found in this directory or any parent directory"
             )
         os.chdir(root)
-        return Manifest.load(Path(os.path.join(root, "project.json"))).ensureType(
+        return Manifest.load(Path(root / "project.json")).ensureType(
             Project
         )

     @staticmethod
-    def fetchs(extern: dict[str, Extern]):
+    def fetchs(extern: dict[str | Path, Extern]):
         for extSpec, ext in extern.items():
-            extPath = os.path.join(const.EXTERN_DIR, extSpec)
+            extPath = const.EXTERN_DIR / extSpec

-            if os.path.exists(extPath):
+            if extPath.exists():
                 print(f"Skipping {extSpec}, already installed")
                 continue
@@ -184,7 +184,7 @@ def initCmd(args: cli.Args):
     list = args.consumeOpt("list")

     template = args.consumeArg()
-    name = args.consumeArg()
+    name = Path(args.consumeArg())

    _logger.info("Fetching registry...")
    r = requests.get(f"https://raw.githubusercontent.com/{repo}/main/registry.json")
@@ -214,7 +214,7 @@ def initCmd(args: cli.Args):
         _logger.info(f"No name was provided, defaulting to {template}")
         name = template

-    if os.path.exists(name):
+    if name.exists():
         raise RuntimeError(f"Directory {name} already exists")

     print(f"Creating project {name} from template {template}...")
@@ -255,8 +255,8 @@ class Target(Manifest):
         return utils.hash((self.props, [v.to_dict() for k, v in self.tools.items()]))

     @property
-    def builddir(self) -> str:
-        return os.path.join(const.BUILD_DIR, f"{self.id}-{self.hashid[:8]}")
+    def builddir(self) -> Path:
+        return const.BUILD_DIR / f"{self.id}-{self.hashid[:8]}"

     @staticmethod
     def use(args: cli.Args) -> "Target":
@@ -518,15 +518,15 @@ class Registry(DataClassJsonMixin):

         # Lookup and load all extern projects
         for externDir in project.externDirs:
-            projectPath = os.path.join(externDir, "project.json")
-            manifestPath = os.path.join(externDir, "manifest.json")
+            projectPath = externDir / "project.json"
+            manifestPath = externDir / "manifest.json"

-            if os.path.exists(projectPath):
-                registry._append(Manifest.load(Path(projectPath)).ensureType(Project))
-            elif os.path.exists(manifestPath):
+            if projectPath.exists():
+                registry._append(Manifest.load(projectPath).ensureType(Project))
+            elif manifestPath.exists():
                 # For simple library allow to have a manifest.json instead of a project.json
                 registry._append(
-                    Manifest.load(Path(manifestPath)).ensureType(Component)
+                    Manifest.load(manifestPath).ensureType(Component)
                 )
             else:
                 _logger.warn(
@@ -535,22 +535,22 @@ class Registry(DataClassJsonMixin):

         # Load all manifests from projects
         for project in list(registry.iter(Project)):
-            targetDir = os.path.join(project.dirname(), const.TARGETS_DIR)
-            targetFiles = shell.find(targetDir, ["*.json"])
+            targetDir = project.parent / const.TARGETS_DIR
+            targetFiles = targetDir.glob("*.json")

             for targetFile in targetFiles:
                 registry._append(Manifest.load(Path(targetFile)).ensureType(Target))

-            componentDir = os.path.join(project.dirname(), const.SRC_DIR)
-            rootComponent = os.path.join(project.dirname(), "manifest.json")
-            componentFiles = shell.find(componentDir, ["manifest.json"])
+            componentDir = project.parent / const.COMPONENTS_DIR
+            rootComponent = project.parent / "manifest.json"
+            componentFiles = list(componentDir.glob("manifest.json"))

-            if os.path.exists(rootComponent):
+            if rootComponent.exists():
                 componentFiles += [rootComponent]

             for componentFile in componentFiles:
                 registry._append(
-                    Manifest.load(Path(componentFile)).ensureType(Component)
+                    Manifest.load(componentFile).ensureType(Component)
                 )

         # Resolve all dependencies for all targets
@@ -30,16 +30,13 @@ def loadAll():
         return

     project = model.Project.at(root)
-    paths = list(
-        map(lambda e: os.path.join(const.EXTERN_DIR, e), project.extern.keys())
-    ) + ["."]
+    paths = list(map(lambda e: const.EXTERN_DIR / e, project.extern.keys())) + ["."]

     for dirname in paths:
-        pluginDir = os.path.join(root, dirname, const.META_DIR, "plugins")
+        pluginDir = root / dirname / const.META_DIR / "plugins"

-        for files in shell.readdir(pluginDir):
-            if files.endswith(".py"):
-                plugin = load(os.path.join(pluginDir, files))
+        for script in pluginDir.glob("*.py"):
+            plugin = load(script)

             if plugin:
                 _logger.info(f"Loaded plugin {plugin.name}")
@@ -12,6 +12,7 @@ import logging
 import tempfile

+from pathlib import Path
 from typing import Optional
 from . import const

@@ -49,81 +50,30 @@ def sha256sum(path: str) -> str:
         return hashlib.sha256(f.read()).hexdigest()


-def find(
-    path: str | list[str], wildcards: list[str] = [], recusive: bool = True
-) -> list[str]:
-    _logger.info(f"Looking for files in {path} matching {wildcards}")
-
-    result: list[str] = []
-
-    if isinstance(path, list):
-        for p in path:
-            result += find(p, wildcards, recusive)
-        return result
-
-    if not os.path.isdir(path):
-        return []
-
-    if recusive:
-        for root, _, files in os.walk(path):
-            for f in files:
-                if len(wildcards) == 0:
-                    result.append(os.path.join(root, f))
-                else:
-                    for wildcard in wildcards:
-                        if fnmatch.fnmatch(f, wildcard):
-                            result.append(os.path.join(root, f))
-                            break
-    else:
-        for f in os.listdir(path):
-            if len(wildcards) == 0:
-                result.append(os.path.join(path, f))
-            else:
-                for wildcard in wildcards:
-                    if fnmatch.fnmatch(f, wildcard):
-                        result.append(os.path.join(path, f))
-                        break
-
-    return result
-
-
-def mkdir(path: str) -> str:
-    _logger.info(f"Creating directory {path}")
-
-    try:
-        os.makedirs(path)
-    except OSError as exc:
-        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
-            raise
-    return path
-
-
-def rmrf(path: str) -> bool:
+def rmrf(path: Path) -> bool:
     _logger.info(f"Removing directory {path}")

-    if not os.path.exists(path):
+    if not path.exists():
         return False
     shutil.rmtree(path, ignore_errors=True)
     return True


-def wget(url: str, path: Optional[str] = None) -> str:
+def wget(url: str, path: Optional[Path] = None) -> Path:
     import requests

     if path is None:
-        path = os.path.join(
-            const.CACHE_DIR, hashlib.sha256(url.encode("utf-8")).hexdigest()
-        )
+        path = const.CACHE_DIR / hashlib.sha256(url.encode("utf-8")).hexdigest()

-    if os.path.exists(path):
+    if path.exists():
         return path

     _logger.info(f"Downloading {url} to {path}")

     r = requests.get(url, stream=True)
     r.raise_for_status()
-    mkdir(os.path.dirname(path))
-    with open(path, "wb") as f:
+    path.parent.mkdir(parents=True, exist_ok=True)
+    with path.open("wb") as f:
         for chunk in r.iter_content(chunk_size=8192):
             if chunk:
                 f.write(chunk)
@@ -179,36 +129,28 @@ def popen(*args: str) -> str:
     return proc.stdout.decode("utf-8")


-def readdir(path: str) -> list[str]:
-    _logger.info(f"Reading directory {path}")
-
-    try:
-        return os.listdir(path)
-    except FileNotFoundError:
-        return []
-
-
-def cp(src: str, dst: str):
+def cp(src: Path, dst: Path):
     _logger.info(f"Copying {src} to {dst}")

     shutil.copy(src, dst)


-def mv(src: str, dst: str):
+def mv(src: Path, dst: Path):
     _logger.info(f"Moving {src} to {dst}")

     shutil.move(src, dst)


-def cpTree(src: str, dst: str):
+def cpTree(src: Path, dst: Path):
     _logger.info(f"Copying {src} to {dst}")

     shutil.copytree(src, dst, dirs_exist_ok=True)


-def cloneDir(url: str, path: str, dest: str) -> str:
+def cloneDir(url: str, path: Path, dest: Path) -> Path:
     with tempfile.TemporaryDirectory() as tmp:
-        mkdir(tmp)
+        tmp = Path(tmp)
+        tmp.mkdir(parents=True, exist_ok=True)
         exec(
             *["git", "clone", "-n", "--depth=1", "--filter=tree:0", url, tmp, "-q"],
             quiet=True,
@@ -218,7 +160,7 @@ def cloneDir(url: str, path: str, dest: str) -> str:
             quiet=True,
         )
         exec(*["git", "-C", tmp, "checkout", "-q", "--no-progress"], quiet=True)
-        mv(os.path.join(tmp, path), dest)
+        mv(tmp / path, dest)

         return dest