flow
This commit is contained in:
0
core/__init__.py
Normal file
0
core/__init__.py
Normal file
BIN
core/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
core/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/action.cpython-313.pyc
Normal file
BIN
core/__pycache__/action.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/config.cpython-313.pyc
Normal file
BIN
core/__pycache__/config.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/console.cpython-313.pyc
Normal file
BIN
core/__pycache__/console.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/paths.cpython-313.pyc
Normal file
BIN
core/__pycache__/paths.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/platform.cpython-313.pyc
Normal file
BIN
core/__pycache__/platform.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/process.cpython-313.pyc
Normal file
BIN
core/__pycache__/process.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/stow.cpython-313.pyc
Normal file
BIN
core/__pycache__/stow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/variables.cpython-313.pyc
Normal file
BIN
core/__pycache__/variables.cpython-313.pyc
Normal file
Binary file not shown.
120
core/action.py
Normal file
120
core/action.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""Action dataclass and ActionExecutor for plan-then-execute workflows."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Callable, Dict, List, Optional
|
||||
|
||||
from flow.core.console import ConsoleLogger
|
||||
|
||||
|
||||
@dataclass
class Action:
    """A single planned step in a plan-then-execute workflow.

    Instances are built during planning and mutated in place by
    ActionExecutor.execute, which records the outcome in ``status``/``error``.
    """

    # Handler lookup key; the part before the first "-" is used as the
    # category heading in the dry-run plan (see ActionExecutor._print_plan).
    type: str
    # Human-readable description shown in plan listings and step headers.
    description: str
    # Arbitrary payload passed verbatim to the registered handler.
    data: Dict[str, Any] = field(default_factory=dict)
    # True: a failing handler marks the action "skipped" and the run continues.
    # False: the action is critical — failure stops execution of the plan.
    skip_on_error: bool = True
    # Restrict the action to one OS name (compared against current_os in
    # ActionExecutor.execute); None means the action runs on every OS.
    os_filter: Optional[str] = None
    # Lifecycle: "pending" -> "completed" | "skipped" | "failed".
    status: str = "pending"
    # Stringified exception from a failed/skipped handler; None otherwise.
    error: Optional[str] = None
|
||||
|
||||
|
||||
class ActionExecutor:
    """Register handlers for action types, then execute a plan."""

    def __init__(self, console: ConsoleLogger):
        """Create an executor that reports progress through *console*."""
        self.console = console
        # action.type -> callable(action.data); populated via register().
        self._handlers: Dict[str, Callable] = {}
        # Free-form notes collected during execution, echoed in the summary.
        self.post_comments: List[str] = []

    def register(self, action_type: str, handler: Callable) -> None:
        """Map *action_type* to *handler*; a later call overwrites it."""
        self._handlers[action_type] = handler

    def execute(self, actions: List[Action], *, dry_run: bool = False, current_os: str = "") -> None:
        """Run *actions* in order, or just print the plan when *dry_run*.

        OS-incompatible actions (os_filter set and != current_os) are dropped
        before execution. Each action's status/error fields are updated in
        place. A failing critical action (skip_on_error=False) aborts the
        remainder of the plan.
        """
        if dry_run:
            # Note: the dry-run plan shows ALL actions, including ones the
            # OS filter would drop at execution time.
            self._print_plan(actions)
            return

        # Filter OS-incompatible actions
        compatible = [a for a in actions if a.os_filter is None or a.os_filter == current_os]
        skipped_count = len(actions) - len(compatible)
        if skipped_count:
            self.console.info(f"Skipped {skipped_count} OS-incompatible actions")

        self.console.section_header(f"EXECUTING {len(compatible)} ACTIONS")

        for i, action in enumerate(compatible, 1):
            self.console.step_start(i, len(compatible), action.description)

            handler = self._handlers.get(action.type)
            if not handler:
                # Unknown action type is non-fatal: mark skipped and move on.
                action.status = "skipped"
                self.console.step_skip(f"No handler for action type: {action.type}")
                continue

            try:
                handler(action.data)
                action.status = "completed"
                self.console.step_complete()
            except Exception as e:
                # Broad catch is deliberate: handler failures must not crash
                # the run; the outcome is recorded on the action instead.
                action.error = str(e)
                if action.skip_on_error:
                    action.status = "skipped"
                    self.console.step_skip(str(e))
                else:
                    action.status = "failed"
                    self.console.step_fail(str(e))
                    print(f"\n{self.console.RED}Critical action failed, stopping execution{self.console.RESET}")
                    break

        self._print_summary(compatible)

    def _print_plan(self, actions: List[Action]) -> None:
        """Print the dry-run plan, grouped by action-type category."""
        self.console.plan_header("EXECUTION PLAN", len(actions))

        # Group by the prefix of the type string, e.g. "pkg-install" -> "pkg".
        grouped: Dict[str, List[Action]] = {}
        for action in actions:
            category = action.type.split("-")[0]
            grouped.setdefault(category, []).append(action)

        for category, category_actions in grouped.items():
            self.console.plan_category(category)
            for i, action in enumerate(category_actions, 1):
                self.console.plan_item(
                    i,
                    action.description,
                    action.os_filter,
                    not action.skip_on_error,
                )

        self.console.plan_legend()

    def _print_summary(self, actions: List[Action]) -> None:
        """Print completed/failed/skipped counts, post-install notes, and
        details for every failed action."""
        completed = sum(1 for a in actions if a.status == "completed")
        failed = sum(1 for a in actions if a.status == "failed")
        skipped = sum(1 for a in actions if a.status == "skipped")

        self.console.section_summary("EXECUTION SUMMARY")
        c = self.console

        print(f"Total actions: {c.BOLD}{len(actions)}{c.RESET}")
        print(f"Completed: {c.GREEN}{completed}{c.RESET}")
        if failed:
            print(f"Failed: {c.RED}{failed}{c.RESET}")
        if skipped:
            print(f"Skipped: {c.YELLOW}{skipped}{c.RESET}")

        if self.post_comments:
            print(f"\n{c.BOLD}POST-INSTALL NOTES{c.RESET}")
            print(f"{c.CYAN}{'-' * 25}{c.RESET}")
            for i, comment in enumerate(self.post_comments, 1):
                print(f"{i}. {comment}")

        if failed:
            print(f"\n{c.BOLD}FAILED ACTIONS{c.RESET}")
            print(f"{c.RED}{'-' * 20}{c.RESET}")
            for action in actions:
                if action.status == "failed":
                    print(f"{c.RED}>{c.RESET} {action.description}")
                    print(f"  {c.GRAY}Error: {action.error}{c.RESET}")
            print(f"\n{c.RED}{failed} action(s) failed. Check the errors above.{c.RESET}")
        else:
            print(f"\n{c.GREEN}All actions completed successfully!{c.RESET}")
|
||||
151
core/config.py
Normal file
151
core/config.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""Configuration loading (INI config + YAML manifest) and FlowContext."""
|
||||
|
||||
import configparser
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import yaml
|
||||
|
||||
from flow.core.console import ConsoleLogger
|
||||
from flow.core import paths
|
||||
from flow.core.platform import PlatformInfo
|
||||
|
||||
|
||||
@dataclass
class TargetConfig:
    """One deploy target parsed from the [targets] section of the config."""

    # Logical name of the target (the config key, or the part before "@").
    namespace: str
    # Platform identifier for the target, e.g. "linux-amd64".
    platform: str
    # SSH destination used to reach the target.
    ssh_host: str
    # Optional path to an SSH identity file; None -> default SSH behavior.
    ssh_identity: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class AppConfig:
    """Application settings loaded from the INI config (see load_config)."""

    # [repository] section
    dotfiles_url: str = ""
    dotfiles_branch: str = "main"
    # [paths] section
    projects_dir: str = "~/projects"
    # [defaults] section
    container_registry: str = "registry.tomastm.com"
    container_tag: str = "latest"
    tmux_session: str = "default"
    # [targets] section, one TargetConfig per parseable entry.
    targets: List[TargetConfig] = field(default_factory=list)
|
||||
|
||||
|
||||
def _parse_target_config(key: str, value: str) -> Optional[TargetConfig]:
    """Parse a single [targets] entry into a TargetConfig.

    Supported formats:
        1) namespace = platform ssh_host [ssh_identity]
        2) namespace@platform = ssh_host [ssh_identity]

    Returns None for entries that do not match either format.
    """
    tokens = value.split()
    if not tokens:
        return None

    if "@" not in key:
        # Format 1: platform and host both come from the value side.
        if len(tokens) < 2:
            return None
        return TargetConfig(
            namespace=key,
            platform=tokens[0],
            ssh_host=tokens[1],
            ssh_identity=tokens[2] if len(tokens) > 2 else None,
        )

    # Format 2: namespace and platform are packed into the key.
    namespace, _, platform = key.partition("@")
    if not namespace or not platform:
        return None
    return TargetConfig(
        namespace=namespace,
        platform=platform,
        ssh_host=tokens[0],
        ssh_identity=tokens[1] if len(tokens) > 1 else None,
    )
|
||||
|
||||
|
||||
def load_config(path: Optional[Path] = None) -> AppConfig:
    """Load INI config file into AppConfig with cascading priority.

    Priority:
    1. Dotfiles repo (self-hosted): ~/.local/share/devflow/dotfiles/flow/.config/flow/config
    2. Local override: ~/.config/devflow/config
    3. Empty fallback

    Args:
        path: Explicit config path; when None the cascade above is used.

    Returns:
        AppConfig populated from the file; defaults where the file is
        missing or omits a section/option.
    """
    cfg = AppConfig()

    if path is None:
        # Priority 1: Check dotfiles repo for self-hosted config
        if paths.DOTFILES_CONFIG.exists():
            path = paths.DOTFILES_CONFIG
        # Priority 2: Fall back to local config
        else:
            path = paths.CONFIG_FILE

    if not path.exists():
        return cfg

    parser = configparser.ConfigParser()
    # Pin the encoding: ConfigParser.read() otherwise decodes with the
    # locale default, which mangles non-ASCII values on non-UTF-8 locales.
    parser.read(path, encoding="utf-8")

    if parser.has_section("repository"):
        cfg.dotfiles_url = parser.get("repository", "dotfiles_url", fallback=cfg.dotfiles_url)
        cfg.dotfiles_branch = parser.get("repository", "dotfiles_branch", fallback=cfg.dotfiles_branch)

    if parser.has_section("paths"):
        cfg.projects_dir = parser.get("paths", "projects_dir", fallback=cfg.projects_dir)

    if parser.has_section("defaults"):
        cfg.container_registry = parser.get("defaults", "container_registry", fallback=cfg.container_registry)
        cfg.container_tag = parser.get("defaults", "container_tag", fallback=cfg.container_tag)
        cfg.tmux_session = parser.get("defaults", "tmux_session", fallback=cfg.tmux_session)

    if parser.has_section("targets"):
        for key in parser.options("targets"):
            raw_value = parser.get("targets", key)
            tc = _parse_target_config(key, raw_value)
            # Malformed target lines are silently ignored by design.
            if tc is not None:
                cfg.targets.append(tc)

    return cfg
|
||||
|
||||
|
||||
def load_manifest(path: Optional[Path] = None) -> Dict[str, Any]:
    """Load YAML manifest file with cascading priority.

    Priority:
    1. Dotfiles repo (self-hosted): ~/.local/share/devflow/dotfiles/flow/.config/flow/manifest.yaml
    2. Local override: ~/.config/devflow/manifest.yaml
    3. Empty fallback

    Args:
        path: Explicit manifest path; when None the cascade above is used.

    Returns:
        The parsed manifest mapping, or {} when the file is missing or its
        top-level value is not a mapping.

    Raises:
        RuntimeError: if the file exists but contains invalid YAML.
    """
    if path is None:
        # Priority 1: Check dotfiles repo for self-hosted manifest
        if paths.DOTFILES_MANIFEST.exists():
            path = paths.DOTFILES_MANIFEST
        # Priority 2: Fall back to local manifest
        else:
            path = paths.MANIFEST_FILE

    if not path.exists():
        return {}

    try:
        # YAML is a UTF-8 text format; pin the encoding rather than relying
        # on the platform locale (prevents mojibake on non-UTF-8 locales).
        with open(path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f)
    except yaml.YAMLError as e:
        raise RuntimeError(f"Invalid YAML in {path}: {e}") from e
    # An empty file or a top-level list yields {} rather than surprising callers.
    return data if isinstance(data, dict) else {}
|
||||
|
||||
|
||||
@dataclass
class FlowContext:
    """Bundle of everything a command needs at runtime."""

    # Parsed INI configuration (load_config).
    config: AppConfig
    # Parsed YAML manifest (load_manifest).
    manifest: Dict[str, Any]
    # Detected OS/architecture for this machine.
    platform: PlatformInfo
    # Shared console used for all user-facing output.
    console: ConsoleLogger
|
||||
138
core/console.py
Normal file
138
core/console.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""Console output formatting — ported from dotfiles_v2/src/console_logger.py."""
|
||||
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class ConsoleLogger:
    """ANSI-colored console output: log levels, numbered steps with timing,
    section banners, dry-run plan listings, and simple tables."""

    # Color constants
    BLUE = "\033[34m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    RED = "\033[31m"
    CYAN = "\033[36m"
    GRAY = "\033[90m"
    DARK_GRAY = "\033[2;37m"
    BOLD = "\033[1m"
    DIM = "\033[2m"
    RESET = "\033[0m"

    # Box drawing characters
    BOX_VERTICAL = "\u2502"
    BOX_HORIZONTAL = "\u2500"
    BOX_TOP_LEFT = "\u250c"
    BOX_TOP_RIGHT = "\u2510"
    BOX_BOTTOM_LEFT = "\u2514"
    BOX_BOTTOM_RIGHT = "\u2518"

    def __init__(self):
        self.step_counter = 0
        # Set by step_start(); step_complete/skip/fail use it for elapsed time.
        self.start_time: Optional[float] = None

    def info(self, message: str):
        """Print an informational message."""
        print(f"{self.CYAN}[INFO]{self.RESET} {message}")

    def warn(self, message: str):
        """Print a warning message."""
        print(f"{self.YELLOW}[WARN]{self.RESET} {message}")

    def error(self, message: str):
        """Print an error message."""
        print(f"{self.RED}[ERROR]{self.RESET} {message}")

    def success(self, message: str):
        """Print a success message."""
        print(f"{self.GREEN}[SUCCESS]{self.RESET} {message}")

    def step_start(self, current: int, total: int, description: str):
        """Print a 'Step current/total' header and start the step timer."""
        print(
            f"\n{self.BOLD}{self.BLUE}Step {current}/{total}:{self.RESET} "
            f"{self.BOLD}{description}{self.RESET}"
        )
        print(f"{self.BLUE}{self.BOX_HORIZONTAL * 4}{self.RESET} {self.GRAY}Starting...{self.RESET}")
        self.start_time = time.time()

    def step_command(self, command: str):
        """Echo the shell command being run within the current step."""
        print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.GRAY}$ {command}{self.RESET}")

    def step_output(self, line: str):
        """Echo one line of command output; blank lines are suppressed."""
        if line.strip():
            print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.DARK_GRAY}  {line.rstrip()}{self.RESET}")

    def step_complete(self, message: str = "Completed successfully"):
        """Close the current step as successful, with elapsed time."""
        elapsed = time.time() - self.start_time if self.start_time else 0
        print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.GREEN}> {message} ({elapsed:.1f}s){self.RESET}")

    def step_skip(self, message: str):
        """Close the current step as skipped, with elapsed time."""
        elapsed = time.time() - self.start_time if self.start_time else 0
        print(
            f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}"
            f"{self.YELLOW}> Skipped: {message} ({elapsed:.1f}s){self.RESET}"
        )

    def step_fail(self, message: str):
        """Close the current step as failed, with elapsed time."""
        elapsed = time.time() - self.start_time if self.start_time else 0
        print(
            f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}"
            f"{self.RED}> Failed: {message} ({elapsed:.1f}s){self.RESET}"
        )

    def section_header(self, title: str, subtitle: str = ""):
        """Print a blue full-width section banner (title upper-cased)."""
        width = 70
        print(f"\n{self.BOLD}{self.BLUE}{'=' * width}{self.RESET}")
        if subtitle:
            print(f"{self.BOLD}{self.BLUE} {title.upper()} - {subtitle}{self.RESET}")
        else:
            print(f"{self.BOLD}{self.BLUE} {title.upper()}{self.RESET}")
        print(f"{self.BOLD}{self.BLUE}{'=' * width}{self.RESET}")

    def section_summary(self, title: str):
        """Print a green full-width summary banner."""
        width = 70
        print(f"\n{self.BOLD}{self.GREEN}{'=' * width}{self.RESET}")
        print(f"{self.BOLD}{self.GREEN} {title.upper()}{self.RESET}")
        print(f"{self.BOLD}{self.GREEN}{'=' * width}{self.RESET}")

    def plan_header(self, title: str, count: int):
        """Print a cyan banner introducing a dry-run plan of *count* actions."""
        width = 70
        print(f"\n{self.BOLD}{self.CYAN}{'=' * width}{self.RESET}")
        print(f"{self.BOLD}{self.CYAN} {title.upper()} ({count} actions){self.RESET}")
        print(f"{self.BOLD}{self.CYAN}{'=' * width}{self.RESET}")

    def plan_category(self, category: str):
        """Print an underlined category heading within a plan."""
        print(f"\n{self.BOLD}{self.CYAN}{category.upper()}{self.RESET}")
        print(f"{self.CYAN}{'-' * 20}{self.RESET}")

    def plan_item(self, number: int, description: str, os_filter: Optional[str] = None, critical: bool = False):
        """Print one numbered plan entry with optional (os) / (critical) tags."""
        os_indicator = f" {self.GRAY}({os_filter}){self.RESET}" if os_filter else ""
        error_indicator = f" {self.RED}(critical){self.RESET}" if critical else ""
        print(f"  {number:2d}. {description}{os_indicator}{error_indicator}")

    def plan_legend(self):
        """Print the legend explaining the plan-item indicator tags."""
        print(
            f"\n{self.GRAY}Legend: {self.RED}(critical){self.GRAY} = stops on failure, "
            f"{self.GRAY}(os){self.GRAY} = OS-specific{self.RESET}"
        )

    def table(self, headers: list[str], rows: list[list[str]]):
        """Print a formatted table."""
        if not rows:
            return

        # Coerce every cell to str so width math below is safe.
        normalized_headers = [str(h) for h in headers]
        normalized_rows = [[str(cell) for cell in row] for row in rows]

        # Calculate column widths
        widths = [len(h) for h in normalized_headers]
        for row in normalized_rows:
            for i, cell in enumerate(row):
                # Extra cells beyond the header count are printed but not
                # used for width calculation.
                if i < len(widths):
                    widths[i] = max(widths[i], len(cell))

        # Header
        header_line = "  ".join(
            f"{self.BOLD}{h:<{widths[i]}}{self.RESET}" for i, h in enumerate(normalized_headers)
        )
        print(header_line)
        print(self.GRAY + "  ".join("-" * w for w in widths) + self.RESET)

        # Rows
        for row in normalized_rows:
            line = "  ".join(f"{cell:<{widths[i]}}" for i, cell in enumerate(row))
            print(line)
|
||||
37
core/paths.py
Normal file
37
core/paths.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""XDG-compliant path constants for DevFlow."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _xdg(env_var: str, fallback: str) -> Path:
|
||||
return Path(os.environ.get(env_var, fallback))
|
||||
|
||||
|
||||
HOME = Path.home()

# XDG base directories, each namespaced with a "devflow" subdirectory.
CONFIG_DIR = _xdg("XDG_CONFIG_HOME", str(HOME / ".config")) / "devflow"
DATA_DIR = _xdg("XDG_DATA_HOME", str(HOME / ".local" / "share")) / "devflow"
STATE_DIR = _xdg("XDG_STATE_HOME", str(HOME / ".local" / "state")) / "devflow"

# User-editable configuration: YAML manifest + INI config.
MANIFEST_FILE = CONFIG_DIR / "manifest.yaml"
CONFIG_FILE = CONFIG_DIR / "config"

# Data trees managed by flow.
DOTFILES_DIR = DATA_DIR / "dotfiles"
PACKAGES_DIR = DATA_DIR / "packages"
SCRATCH_DIR = DATA_DIR / "scratch"
PROJECTS_DIR = HOME / "projects"

# JSON state files recording what flow has linked / installed.
LINKED_STATE = STATE_DIR / "linked.json"
INSTALLED_STATE = STATE_DIR / "installed.json"

# Self-hosted flow config paths (from dotfiles repo); these take priority
# over CONFIG_FILE / MANIFEST_FILE when present (see core/config.py).
DOTFILES_FLOW_CONFIG = DOTFILES_DIR / "flow" / ".config" / "flow"
DOTFILES_MANIFEST = DOTFILES_FLOW_CONFIG / "manifest.yaml"
DOTFILES_CONFIG = DOTFILES_FLOW_CONFIG / "config"
|
||||
|
||||
|
||||
def ensure_dirs() -> None:
    """Create all required directories if they don't exist."""
    required = (CONFIG_DIR, DATA_DIR, STATE_DIR, PACKAGES_DIR, SCRATCH_DIR)
    for directory in required:
        directory.mkdir(parents=True, exist_ok=True)
|
||||
43
core/platform.py
Normal file
43
core/platform.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""OS and architecture detection."""
|
||||
|
||||
import platform as _platform
|
||||
import shutil
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
class PlatformInfo:
    """Detected operating system / CPU architecture pair."""

    os: str = "linux"  # "linux" or "macos"
    arch: str = "amd64"  # "amd64" or "arm64"
    platform: str = ""  # "linux-amd64", etc.

    def __post_init__(self):
        # Derive the combined "<os>-<arch>" identifier unless the caller
        # supplied one explicitly.
        if not self.platform:
            self.platform = f"{self.os}-{self.arch}"
|
||||
|
||||
|
||||
# platform.system() / platform.machine() values mapped to flow's canonical
# OS and architecture names; anything not listed here is rejected by
# detect_platform() with a RuntimeError.
_OS_MAP = {"Darwin": "macos", "Linux": "linux"}
_ARCH_MAP = {"x86_64": "amd64", "aarch64": "arm64", "arm64": "arm64"}
|
||||
|
||||
|
||||
def detect_platform() -> PlatformInfo:
    """Return a PlatformInfo for the running machine.

    Raises:
        RuntimeError: for any OS or architecture outside the supported
            linux/macos x amd64/arm64 matrix.
    """
    system_name = _platform.system()
    mapped_os = _OS_MAP.get(system_name)
    if mapped_os is None:
        raise RuntimeError(f"Unsupported operating system: {system_name}")

    machine_name = _platform.machine().lower()
    mapped_arch = _ARCH_MAP.get(machine_name)
    if mapped_arch is None:
        raise RuntimeError(f"Unsupported architecture: {machine_name}")

    return PlatformInfo(os=mapped_os, arch=mapped_arch, platform=f"{mapped_os}-{mapped_arch}")
|
||||
|
||||
|
||||
def detect_container_runtime() -> Optional[str]:
    """Return 'docker' or 'podman' if available, else None.

    Docker wins when both runtimes are on PATH.
    """
    return next(
        (runtime for runtime in ("docker", "podman") if shutil.which(runtime)),
        None,
    )
|
||||
45
core/process.py
Normal file
45
core/process.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""Command execution with streaming output."""
|
||||
|
||||
import subprocess
|
||||
|
||||
from flow.core.console import ConsoleLogger
|
||||
|
||||
|
||||
def run_command(
    command: str,
    console: ConsoleLogger,
    *,
    check: bool = True,
    shell: bool = True,
    capture: bool = False,
) -> subprocess.CompletedProcess:
    """Run a command with real-time streamed output.

    Args:
        command: Command line to execute (through the shell by default).
        console: Logger used to echo the command and stream its output.
        check: Raise RuntimeError when the command exits non-zero.
        shell: Pass the command through the shell.
        capture: Collect output silently instead of streaming to console.

    Returns:
        CompletedProcess whose stdout is the joined non-empty output lines
        (stderr is merged into stdout); its stderr is always "".

    Raises:
        RuntimeError: when *check* is True and the exit code is non-zero.
    """
    console.step_command(command)

    output_lines = []
    # Context manager guarantees the stdout pipe is closed even if streaming
    # raises (the previous bare Popen leaked the pipe fd until GC).
    with subprocess.Popen(
        command,
        shell=shell,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # interleave stderr with stdout
        universal_newlines=True,
        bufsize=1,  # line-buffered so output appears as it is produced
    ) as process:
        for line in process.stdout:
            line = line.rstrip()
            if line:
                if not capture:
                    console.step_output(line)
                output_lines.append(line)

        process.wait()

    if check and process.returncode != 0:
        raise RuntimeError(
            f"Command failed (exit {process.returncode}): {command}"
        )

    return subprocess.CompletedProcess(
        command, process.returncode, stdout="\n".join(output_lines), stderr=""
    )
|
||||
358
core/stow.py
Normal file
358
core/stow.py
Normal file
@@ -0,0 +1,358 @@
|
||||
"""GNU Stow-style tree folding/unfolding for efficient symlink management."""
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Set
|
||||
|
||||
|
||||
@dataclass
class LinkOperation:
    """One step of a linking plan, rendered for dry-run display via str()."""

    # One of: "create_symlink" | "create_dir" | "unfold" | "remove" | "remove_dir"
    type: str
    source: Path
    target: Path
    package: str
    is_directory_link: bool = False

    def __str__(self) -> str:
        # Symlink creation distinguishes folded (DIR) from plain (FILE) links.
        if self.type == "create_symlink":
            kind = "DIR" if self.is_directory_link else "FILE"
            return f"  {kind} LINK: {self.target} -> {self.source}"

        # Every other known operation maps to a fixed one-line rendering;
        # unknown types fall back to "<type>: <target>".
        renderings = {
            "create_dir": f"  CREATE DIR: {self.target}",
            "unfold": f"  UNFOLD: {self.target} (directory symlink -> individual file symlinks)",
            "remove": f"  REMOVE: {self.target}",
            "remove_dir": f"  REMOVE DIR: {self.target}",
        }
        return renderings.get(self.type, f"  {self.type}: {self.target}")
|
||||
|
||||
|
||||
@dataclass
class LinkTree:
    """In-memory model of every symlink flow currently manages."""

    links: Dict[Path, Path] = field(default_factory=dict)  # target -> source
    packages: Dict[Path, str] = field(default_factory=dict)  # target -> package_name
    directory_links: Set[Path] = field(default_factory=set)  # targets that are directory links

    def add_link(self, target: Path, source: Path, package: str, is_dir_link: bool = False):
        """Record a managed link (overwrites any previous entry for target)."""
        self.links[target] = source
        self.packages[target] = package
        if is_dir_link:
            self.directory_links.add(target)

    def remove_link(self, target: Path):
        """Forget a managed link; silently ignores unknown targets."""
        self.links.pop(target, None)
        self.packages.pop(target, None)
        self.directory_links.discard(target)

    def is_directory_link(self, target: Path) -> bool:
        """Return True when *target* is recorded as a directory symlink."""
        return target in self.directory_links

    def get_package(self, target: Path) -> Optional[str]:
        """Return the owning package for *target*, or None if unmanaged."""
        return self.packages.get(target)

    def can_fold(self, target_dir: Path, package: str) -> bool:
        """Return True when every direct child link of *target_dir* belongs
        to *package*, i.e. one directory symlink could replace them all."""
        for child_target, child_package in self.packages.items():
            # Only direct children of target_dir matter for folding.
            if child_target.parent != target_dir:
                continue
            if child_package != package:
                return False
        return True

    @classmethod
    def from_state(cls, state: dict) -> "LinkTree":
        """Build a LinkTree from the linked.json state format (v2 only)."""
        tree = cls()
        for package_name, pkg_links in state.get("links", {}).items():
            for target_str, link_info in pkg_links.items():
                if not isinstance(link_info, dict) or "source" not in link_info:
                    raise RuntimeError(
                        "Unsupported linked state format. Remove linked.json and relink dotfiles."
                    )
                tree.add_link(
                    Path(target_str),
                    Path(link_info["source"]),
                    package_name,
                    bool(link_info.get("is_directory_link", False)),
                )
        return tree

    def to_state(self) -> dict:
        """Convert this LinkTree to the linked.json state format."""
        per_package: Dict[str, Dict[str, dict]] = {}
        for target, source in self.links.items():
            bucket = per_package.setdefault(self.packages[target], {})
            bucket[str(target)] = {
                "source": str(source),
                "is_directory_link": target in self.directory_links,
            }
        return {"version": 2, "links": per_package}
|
||||
|
||||
|
||||
class TreeFolder:
    """Implements GNU Stow tree folding/unfolding algorithm.

    Plans are built as lists of LinkOperation and applied by
    execute_operations; the wrapped LinkTree is kept in sync as
    operations execute.
    """

    def __init__(self, tree: LinkTree):
        # The tree is mutated in place by execute_operations.
        self.tree = tree

    def plan_link(
        self, source: Path, target: Path, package: str, is_dir_link: bool = False
    ) -> List[LinkOperation]:
        """Plan operations needed to create a link (may include unfolding).

        Args:
            source: Source path (file or directory in dotfiles)
            target: Target path (where symlink should be created)
            package: Package name
            is_dir_link: Whether this is a directory symlink (folded)

        Returns a list of operations to execute in order.
        """
        operations = []

        # Check if parent is a directory symlink that needs unfolding
        parent = target.parent
        if parent in self.tree.links and self.tree.is_directory_link(parent):
            # Parent is a folded directory symlink, need to unfold
            unfold_ops = self._plan_unfold(parent)
            operations.extend(unfold_ops)

        # Create symlink operation (conflict detection will handle existing links)
        operations.append(
            LinkOperation(
                type="create_symlink",
                source=source,
                target=target,
                package=package,
                is_directory_link=is_dir_link,
            )
        )

        return operations

    def _find_fold_point(
        self, source: Path, target: Path, package: str
    ) -> Path:
        """Find the deepest directory level where we can create a folder symlink.

        Returns the target path where the symlink should be created.
        For single files, this should just return the file path (no folding).
        Folding only makes sense when linking entire directories.
        """
        # For now, disable automatic folding at the plan_link level
        # Folding should be done at a higher level when we know we're
        # linking an entire directory tree from a package
        return target

    def _plan_unfold(self, folded_dir: Path) -> List[LinkOperation]:
        """Plan operations to unfold a directory symlink.

        When unfolding:
        1. Remove the directory symlink
        2. Create a real directory
        3. Create individual file symlinks for all files
        """
        operations = []

        # Get the source of the folded directory
        source_dir = self.tree.links.get(folded_dir)
        if not source_dir:
            # Not actually managed; nothing to unfold.
            return operations

        package = self.tree.packages.get(folded_dir, "")

        # Remove the directory symlink
        operations.append(
            LinkOperation(
                type="remove",
                source=source_dir,
                target=folded_dir,
                package=package,
                is_directory_link=True,
            )
        )

        # Create real directory
        operations.append(
            LinkOperation(
                type="create_dir",
                source=source_dir,
                target=folded_dir,
                package=package,
            )
        )

        # Create individual file symlinks for all files in source
        if source_dir.exists() and source_dir.is_dir():
            for root, _dirs, files in os.walk(source_dir):
                for fname in files:
                    src_file = Path(root) / fname
                    # Mirror the source's relative layout under the unfolded dir.
                    rel = src_file.relative_to(source_dir)
                    dst_file = folded_dir / rel

                    operations.append(
                        LinkOperation(
                            type="create_symlink",
                            source=src_file,
                            target=dst_file,
                            package=package,
                            is_directory_link=False,
                        )
                    )

        return operations

    def plan_unlink(self, target: Path, package: str) -> List[LinkOperation]:
        """Plan operations to remove a link (may include refolding)."""
        operations = []

        # Check if this is a directory link
        if self.tree.is_directory_link(target):
            # Remove all file links under this directory
            to_remove = []
            for link_target in self.tree.links.keys():
                try:
                    # relative_to succeeds for target itself and anything below it.
                    link_target.relative_to(target)
                    to_remove.append(link_target)
                except ValueError:
                    continue

            # NOTE(review): to_remove includes `target` itself (relative_to
            # returns '.' rather than raising), so the block below emits a
            # second remove op for it; execute_operations tolerates the
            # duplicate because the second unlink finds nothing on disk.
            for link_target in to_remove:
                operations.append(
                    LinkOperation(
                        type="remove",
                        source=self.tree.links[link_target],
                        target=link_target,
                        package=self.tree.packages[link_target],
                        is_directory_link=False,
                    )
                )

        # Remove the link itself
        if target in self.tree.links:
            operations.append(
                LinkOperation(
                    type="remove",
                    source=self.tree.links[target],
                    target=target,
                    package=package,
                    is_directory_link=self.tree.is_directory_link(target),
                )
            )

        return operations

    def detect_conflicts(self, operations: List[LinkOperation]) -> List[str]:
        """Detect conflicts before executing operations.

        Returns a list of conflict error messages.
        """
        conflicts = []

        for op in operations:
            if op.type == "create_symlink":
                # Check if target already exists in tree (managed by flow)
                if op.target in self.tree.links:
                    existing_pkg = self.tree.packages[op.target]
                    if existing_pkg != op.package:
                        conflicts.append(
                            f"Conflict: {op.target} is already linked by package '{existing_pkg}'"
                        )
                # Check if target exists on disk but not managed by flow
                # (is_symlink also catches broken symlinks, which exists() misses)
                elif op.target.exists() or op.target.is_symlink():
                    conflicts.append(
                        f"Conflict: {op.target} already exists and is not managed by flow"
                    )

                # Check if target's parent is a file (can't create file in file)
                if op.target.parent.exists() and op.target.parent.is_file():
                    conflicts.append(
                        f"Conflict: {op.target.parent} is a file, cannot create {op.target}"
                    )

        return conflicts

    def execute_operations(
        self, operations: List[LinkOperation], dry_run: bool = False
    ) -> None:
        """Execute a list of operations atomically.

        If dry_run is True, only print what would be done.
        """
        if dry_run:
            for op in operations:
                print(str(op))
            return

        # Execute operations
        for op in operations:
            if op.type == "create_symlink":
                # Create parent directories
                op.target.parent.mkdir(parents=True, exist_ok=True)

                if op.target.is_symlink():
                    # Idempotence: a symlink already pointing at the desired
                    # source is adopted into the tree instead of recreated.
                    current = op.target.resolve(strict=False)
                    desired = op.source.resolve(strict=False)
                    if current == desired:
                        self.tree.add_link(op.target, op.source, op.package, op.is_directory_link)
                        continue
                    op.target.unlink()
                elif op.target.exists():
                    if op.target.is_file():
                        op.target.unlink()
                    else:
                        # Refuse to clobber a real directory with a symlink.
                        raise RuntimeError(f"Cannot overwrite directory: {op.target}")

                # Create symlink
                op.target.symlink_to(op.source)

                # Update tree
                self.tree.add_link(op.target, op.source, op.package, op.is_directory_link)

            elif op.type == "create_dir":
                op.target.mkdir(parents=True, exist_ok=True)

            elif op.type == "remove":
                # exists() is False for broken symlinks; is_symlink() covers them.
                if op.target.exists() or op.target.is_symlink():
                    op.target.unlink()
                self.tree.remove_link(op.target)

            elif op.type == "remove_dir":
                # rmdir only removes empty directories; non-empty dirs are left alone.
                if op.target.exists() and op.target.is_dir():
                    op.target.rmdir()

    def to_state(self) -> dict:
        """Convert current tree to state format for persistence."""
        return self.tree.to_state()
|
||||
38
core/variables.py
Normal file
38
core/variables.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Variable substitution for $VAR/${VAR} and {{var}} templates."""
|
||||
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
|
||||
def substitute(text: str, variables: Dict[str, str]) -> str:
    """Expand $VAR and ${VAR} references in *text*.

    Lookup order: *variables* dict, then a special-cased HOME
    (Path.home()), then the process environment. Unresolvable
    references are left untouched. Non-string input is returned as-is.
    """
    if not isinstance(text, str):
        return text

    var_re = re.compile(r"\$(\w+)|\$\{([^}]+)\}")

    def _lookup(m: re.Match[str]) -> str:
        name = m.group(1) or m.group(2) or ""
        if name in variables:
            return str(variables[name])
        if name == "HOME":
            return str(Path.home())
        return os.environ.get(name, m.group(0))

    return var_re.sub(_lookup, text)
|
||||
|
||||
|
||||
def substitute_template(text: str, context: Dict[str, str]) -> str:
    """Fill {{key}} placeholders in *text* from *context*.

    Placeholders whose key is missing from *context* are left untouched.
    Non-string input is returned as-is.
    """
    if not isinstance(text, str):
        return text

    return re.sub(
        r"\{\{(\w+)\}\}",
        lambda m: context.get(m.group(1).strip(), m.group(0)),
        text,
    )
|
||||
Reference in New Issue
Block a user