This commit is contained in:
2026-02-12 09:42:59 +02:00
commit 906adb539d
87 changed files with 5288 additions and 0 deletions

0
commands/__init__.py Normal file
View File

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

418
commands/bootstrap.py Normal file
View File

@@ -0,0 +1,418 @@
"""flow bootstrap — environment provisioning with plan-then-execute model."""
import os
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, List
from flow.core.action import Action, ActionExecutor
from flow.core.config import FlowContext, load_manifest
from flow.core.paths import DOTFILES_DIR
from flow.core.process import run_command
from flow.core.variables import substitute
def register(subparsers):
    """Attach the `bootstrap` command (aliases: setup, provision) and its subcommands."""
    parser = subparsers.add_parser(
        "bootstrap",
        aliases=["setup", "provision"],
        help="Environment provisioning",
    )
    sub = parser.add_subparsers(dest="bootstrap_command")

    run_parser = sub.add_parser("run", help="Run bootstrap actions")
    run_parser.add_argument("--profile", help="Profile name to use")
    run_parser.add_argument("--dry-run", action="store_true", help="Show plan without executing")
    run_parser.add_argument("--var", action="append", default=[], help="Set variable KEY=VALUE")
    run_parser.set_defaults(handler=run_bootstrap)

    list_parser = sub.add_parser("list", help="List available profiles")
    list_parser.set_defaults(handler=run_list)

    show_parser = sub.add_parser("show", help="Show profile configuration")
    show_parser.add_argument("profile", help="Profile name")
    show_parser.set_defaults(handler=run_show)

    # Bare `flow bootstrap` prints help instead of failing.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
def _get_profiles(ctx: FlowContext) -> dict:
profiles = ctx.manifest.get("profiles")
if profiles is None:
if "environments" in ctx.manifest:
raise RuntimeError(
"Manifest key 'environments' is no longer supported. Rename it to 'profiles'."
)
return {}
if not isinstance(profiles, dict):
raise RuntimeError("Manifest key 'profiles' must be a mapping")
return profiles
def _parse_variables(var_args: list) -> dict:
variables = {}
for v in var_args:
if "=" not in v:
raise ValueError(f"Invalid --var value '{v}'. Expected KEY=VALUE")
key, value = v.split("=", 1)
if not key:
raise ValueError(f"Invalid --var value '{v}'. KEY cannot be empty")
variables[key] = value
return variables
def _plan_actions(ctx: FlowContext, profile_name: str, env_config: dict, variables: dict) -> List[Action]:
    """Plan all actions from a profile configuration.

    Translates the declarative profile mapping into an ordered list of
    Action objects; nothing is executed here. Ordering mirrors the
    manifest sections: required variables, hostname, locale, shell,
    packages (repo update, standard, cask, binary), SSH keys, config
    links, then custom commands.

    Args:
        ctx: Flow context (used for manifest access to binary definitions).
        profile_name: Name of the selected profile (informational).
        env_config: The profile's configuration mapping from the manifest.
        variables: KEY=VALUE variables used for substitution.

    Returns:
        Ordered list of planned actions.
    """
    actions: List[Action] = []
    # Required-variable checks run first so a missing value fails fast.
    for req_var in env_config.get("requires", []):
        actions.append(Action(
            type="check-variable",
            description=f"Check required variable: {req_var}",
            data={"variable": req_var, "variables": variables},
            skip_on_error=False,
        ))
    # Hostname (supports variable substitution).
    if "hostname" in env_config:
        hostname = substitute(env_config["hostname"], variables)
        actions.append(Action(
            type="set-hostname",
            description=f"Set hostname to: {hostname}",
            data={"hostname": hostname},
            skip_on_error=False,
        ))
    # Locale (Linux-only: locale-gen/update-locale).
    if "locale" in env_config:
        actions.append(Action(
            type="set-locale",
            description=f"Set locale to: {env_config['locale']}",
            data={"locale": env_config["locale"]},
            skip_on_error=True,
            os_filter="linux",
        ))
    # Login shell (Linux-only).
    if "shell" in env_config:
        actions.append(Action(
            type="set-shell",
            description=f"Set shell to: {env_config['shell']}",
            data={"shell": env_config["shell"]},
            skip_on_error=True,
            os_filter="linux",
        ))
    # Packages: repo update, then standard / cask / binary installs.
    if "packages" in env_config:
        packages_config = env_config["packages"]
        pm = env_config.get("package-manager", "apt-get")
        actions.append(Action(
            type="pm-update",
            description=f"Update {pm} package repositories",
            data={"pm": pm},
            skip_on_error=False,
        ))
        # Both "standard" and "package" keys contribute; entries may be
        # plain names or {"name": ...} mappings.
        standard = []
        for pkg in packages_config.get("standard", []) + packages_config.get("package", []):
            if isinstance(pkg, str):
                standard.append(pkg)
            else:
                standard.append(pkg["name"])
        if standard:
            actions.append(Action(
                type="install-packages",
                description=f"Install {len(standard)} packages via {pm}",
                data={"pm": pm, "packages": standard, "type": "standard"},
                skip_on_error=False,
            ))
        # Cask packages (macOS only).
        cask = []
        for pkg in packages_config.get("cask", []):
            if isinstance(pkg, str):
                cask.append(pkg)
            else:
                cask.append(pkg["name"])
        if cask:
            actions.append(Action(
                type="install-packages",
                description=f"Install {len(cask)} cask packages via {pm}",
                data={"pm": pm, "packages": cask, "type": "cask"},
                skip_on_error=False,
                os_filter="macos",
            ))
        # Binary packages resolved against the manifest "binaries" section.
        binaries_manifest = ctx.manifest.get("binaries", {})
        for pkg in packages_config.get("binary", []):
            pkg_name = pkg if isinstance(pkg, str) else pkg["name"]
            binary_def = binaries_manifest.get(pkg_name, {})
            actions.append(Action(
                type="install-binary",
                description=f"Install binary: {pkg_name}",
                data={"name": pkg_name, "definition": binary_def, "spec": pkg if isinstance(pkg, dict) else {}},
                skip_on_error=True,
            ))
    # SSH key generation.
    for ssh_config in env_config.get("ssh_keygen", []):
        filename = ssh_config.get("filename", f"id_{ssh_config['type']}")
        actions.append(Action(
            type="generate-ssh-key",
            # Fix: the description previously read "Generate SSH key: (unknown)"
            # and left the computed filename unused; show the actual key file.
            description=f"Generate SSH key: {filename}",
            data=ssh_config,
            skip_on_error=True,
        ))
    # Dotfiles config linking.
    for config in env_config.get("configs", []):
        config_name = config if isinstance(config, str) else config["name"]
        actions.append(Action(
            type="link-config",
            description=f"Link configuration: {config_name}",
            data={"config_name": config_name},
            skip_on_error=True,
        ))
    # Custom shell commands, numbered for readable plan output.
    for i, command in enumerate(env_config.get("runcmd", []), 1):
        actions.append(Action(
            type="run-command",
            description=f"Run custom command {i}",
            data={"command": command},
            skip_on_error=True,
        ))
    return actions
def _register_handlers(executor: ActionExecutor, ctx: FlowContext, variables: dict):
    """Register all action type handlers.

    Each handler receives the planned Action's `data` mapping and performs
    the real work, mostly by shelling out via run_command. Whether a
    handler failure aborts the run is decided by the planner
    (skip_on_error), not here.
    """
    def handle_check_variable(data):
        # Fail fast when a profile-required variable was not passed via --var.
        var = data["variable"]
        if var not in data.get("variables", {}):
            raise RuntimeError(f"Required variable not set: {var}")
    def handle_set_hostname(data):
        hostname = data["hostname"]
        if ctx.platform.os == "macos":
            # macOS keeps three separate names; set them all via scutil.
            run_command(f"sudo scutil --set ComputerName '{hostname}'", ctx.console)
            run_command(f"sudo scutil --set HostName '{hostname}'", ctx.console)
            run_command(f"sudo scutil --set LocalHostName '{hostname}'", ctx.console)
        else:
            run_command(f"sudo hostnamectl set-hostname '{hostname}'", ctx.console)
    def handle_set_locale(data):
        # Linux-only: the planner tags this action with os_filter="linux".
        locale = data["locale"]
        run_command(f"sudo locale-gen {locale}", ctx.console)
        run_command(f"sudo update-locale LANG={locale}", ctx.console)
    def handle_set_shell(data):
        shell = data["shell"]
        shell_path = shutil.which(shell)
        if not shell_path:
            raise RuntimeError(f"Shell not found: {shell}")
        try:
            # Register the shell in /etc/shells if not already listed.
            with open("/etc/shells") as f:
                if shell_path not in f.read():
                    run_command(f"echo '{shell_path}' | sudo tee -a /etc/shells", ctx.console)
        except FileNotFoundError:
            pass
        run_command(f"chsh -s {shell_path}", ctx.console)
    def handle_pm_update(data):
        pm = data["pm"]
        # Known package managers get tailored update commands; anything
        # else falls back to `sudo <pm> update`.
        commands = {
            "apt-get": "sudo apt-get update -qq",
            "apt": "sudo apt update -qq",
            "brew": "brew update",
        }
        cmd = commands.get(pm, f"sudo {pm} update")
        run_command(cmd, ctx.console)
    def handle_install_packages(data):
        pm = data["pm"]
        packages = data["packages"]
        pkg_type = data.get("type", "standard")
        pkg_str = " ".join(packages)
        if pm in ("apt-get", "apt"):
            cmd = f"sudo {pm} install -y {pkg_str}"
        elif pm == "brew" and pkg_type == "cask":
            # Homebrew casks (GUI apps) require the --cask flag.
            cmd = f"brew install --cask {pkg_str}"
        elif pm == "brew":
            cmd = f"brew install {pkg_str}"
        else:
            cmd = f"sudo {pm} install {pkg_str}"
        run_command(cmd, ctx.console)
    def handle_install_binary(data):
        # Installs a binary from a GitHub release, driven by the manifest
        # "binaries" definition (source, version, asset-pattern,
        # platform-map, install-script).
        from flow.core.variables import substitute_template
        pkg_name = data["name"]
        pkg_def = data["definition"]
        if not pkg_def:
            raise RuntimeError(f"No binary definition for: {pkg_name}")
        source = pkg_def.get("source", "")
        if not source.startswith("github:"):
            raise RuntimeError(f"Unsupported source: {source}")
        owner_repo = source[len("github:"):]
        version = pkg_def.get("version", "")
        asset_pattern = pkg_def.get("asset-pattern", "")
        platform_map = pkg_def.get("platform-map", {})
        mapping = platform_map.get(ctx.platform.platform)
        if not mapping:
            raise RuntimeError(f"No platform mapping for {ctx.platform.platform}")
        # Resolve the release asset name, then expose the download URL to
        # the install-script template as "downloadUrl".
        template_ctx = {**mapping, "version": version}
        asset = substitute_template(asset_pattern, template_ctx)
        url = f"https://github.com/{owner_repo}/releases/download/v{version}/{asset}"
        template_ctx["downloadUrl"] = url
        install_script = pkg_def.get("install-script", "")
        if install_script:
            resolved = substitute_template(install_script, template_ctx)
            run_command(resolved, ctx.console)
    def handle_generate_ssh_key(data):
        ssh_dir = Path.home() / ".ssh"
        ssh_dir.mkdir(mode=0o700, exist_ok=True)
        key_type = data["type"]
        # The comment may reference --var variables (e.g. an email address).
        comment = substitute(data.get("comment", ""), variables)
        filename = data.get("filename", f"id_{key_type}")
        key_path = ssh_dir / filename
        if key_path.exists():
            # Never overwrite an existing key; warn and skip.
            ctx.console.warn(f"SSH key already exists: {key_path}")
            return
        run_command(f'ssh-keygen -t {key_type} -f "{key_path}" -N "" -C "{comment}"', ctx.console)
    def handle_link_config(data):
        # NOTE(review): informational only — no linking is performed here;
        # presumably `flow dotfiles link` does the actual work. Confirm.
        config_name = data["config_name"]
        ctx.console.info(f"Linking config: {config_name}")
    def handle_run_command(data):
        # Custom runcmd entries also get variable substitution.
        command = substitute(data["command"], variables)
        run_command(command, ctx.console)
    executor.register("check-variable", handle_check_variable)
    executor.register("set-hostname", handle_set_hostname)
    executor.register("set-locale", handle_set_locale)
    executor.register("set-shell", handle_set_shell)
    executor.register("pm-update", handle_pm_update)
    executor.register("install-packages", handle_install_packages)
    executor.register("install-binary", handle_install_binary)
    executor.register("generate-ssh-key", handle_generate_ssh_key)
    executor.register("link-config", handle_link_config)
    executor.register("run-command", handle_run_command)
def run_bootstrap(ctx: FlowContext, args):
    """Handler for `flow bootstrap run`: resolve a profile and execute its plan.

    Steps: (1) best-effort link of the flow config package from dotfiles,
    (2) resolve the profile (auto-selected when only one exists), (3) check
    the profile's target OS, (4) parse --var values, (5) plan and execute.
    Exits with status 1 on any user-facing error.
    """
    # Check if flow package exists in dotfiles and link it first
    flow_pkg = DOTFILES_DIR / "common" / "flow"
    if flow_pkg.exists() and (flow_pkg / ".config" / "flow").exists():
        ctx.console.info("Found flow config in dotfiles, linking...")
        # Link flow package first
        result = subprocess.run(
            [sys.executable, "-m", "flow", "dotfiles", "link", "flow"],
            capture_output=True, text=True,
        )
        if result.returncode == 0:
            ctx.console.success("Flow config linked from dotfiles")
            # Reload manifest from newly linked location
            ctx.manifest = load_manifest()
        else:
            # Linking is best-effort: warn with whatever detail we captured.
            detail = (result.stderr or "").strip() or (result.stdout or "").strip() or "unknown error"
            ctx.console.warn(f"Failed to link flow config: {detail}")
    profiles = _get_profiles(ctx)
    if not profiles:
        ctx.console.error("No profiles found in manifest.")
        sys.exit(1)
    profile_name = args.profile
    if not profile_name:
        # A lone profile is selected implicitly; otherwise --profile is required.
        if len(profiles) == 1:
            profile_name = next(iter(profiles))
        else:
            ctx.console.error(f"Multiple profiles available. Specify with --profile: {', '.join(profiles.keys())}")
            sys.exit(1)
    if profile_name not in profiles:
        ctx.console.error(f"Profile not found: {profile_name}. Available: {', '.join(profiles.keys())}")
        sys.exit(1)
    env_config = profiles[profile_name]
    # Refuse to run a profile that targets a different operating system.
    profile_os = env_config.get("os")
    if profile_os and profile_os != ctx.platform.os:
        ctx.console.error(
            f"Profile '{profile_name}' targets '{profile_os}', current OS is '{ctx.platform.os}'"
        )
        sys.exit(1)
    try:
        variables = _parse_variables(args.var)
    except ValueError as e:
        ctx.console.error(str(e))
        sys.exit(1)
    actions = _plan_actions(ctx, profile_name, env_config, variables)
    executor = ActionExecutor(ctx.console)
    _register_handlers(executor, ctx, variables)
    # current_os presumably lets the executor apply per-action os_filter —
    # confirm against ActionExecutor.execute.
    executor.execute(actions, dry_run=args.dry_run, current_os=ctx.platform.os)
def run_list(ctx: FlowContext, args):
    """Handler for `flow bootstrap list`: tabulate profiles with rough counts."""
    profiles = _get_profiles(ctx)
    if not profiles:
        ctx.console.info("No profiles defined in manifest.")
        return
    rows = []
    for name, config in sorted(profiles.items()):
        # Count package entries across every list-valued "packages" section.
        pkg_count = sum(
            len(section)
            for section in config.get("packages", {}).values()
            if isinstance(section, list)
        )
        # Non-package actions: scalar settings plus list-valued sections.
        action_count = sum(1 for key in ("hostname", "locale", "shell") if key in config)
        action_count += sum(
            len(config.get(key, [])) for key in ("ssh_keygen", "configs", "runcmd")
        )
        rows.append([name, config.get("os", "any"), str(pkg_count), str(action_count)])
    ctx.console.table(["PROFILE", "OS", "PACKAGES", "ACTIONS"], rows)
def run_show(ctx: FlowContext, args):
    """Handler for `flow bootstrap show`: print a profile's planned actions (dry run)."""
    profiles = _get_profiles(ctx)
    profile_name = args.profile
    if profile_name not in profiles:
        ctx.console.error(f"Profile not found: {profile_name}. Available: {', '.join(profiles.keys())}")
        sys.exit(1)
    env_config = profiles[profile_name]
    # Plan with no variables and render the plan via a dry-run execution.
    actions = _plan_actions(ctx, profile_name, env_config, {})
    ActionExecutor(ctx.console).execute(actions, dry_run=True)

525
commands/completion.py Normal file
View File

@@ -0,0 +1,525 @@
"""flow completion — shell completion support (dynamic zsh)."""
import argparse
import json
import shutil
import subprocess
from pathlib import Path
from typing import List, Optional, Sequence, Set
from flow.commands.enter import HOST_TEMPLATES
from flow.core.config import load_config, load_manifest
from flow.core.paths import DOTFILES_DIR, INSTALLED_STATE
ZSH_RC_START = "# >>> flow completion >>>"
ZSH_RC_END = "# <<< flow completion <<<"
TOP_LEVEL_COMMANDS = [
"enter",
"dev",
"dotfiles",
"dot",
"bootstrap",
"setup",
"provision",
"package",
"pkg",
"sync",
"completion",
]
def register(subparsers):
    """Attach the `completion` command and its zsh-related subcommands."""
    parser = subparsers.add_parser("completion", help="Shell completion helpers")
    sub = parser.add_subparsers(dest="completion_command")

    zsh = sub.add_parser("zsh", help="Print zsh completion script")
    zsh.set_defaults(handler=run_zsh_script)

    install = sub.add_parser("install-zsh", help="Install zsh completion script")
    install.add_argument(
        "--dir",
        default="~/.zsh/completions",
        help="Directory where _flow completion file is written",
    )
    install.add_argument(
        "--rc",
        default="~/.zshrc",
        help="Shell rc file to update with fpath/compinit snippet",
    )
    install.add_argument(
        "--no-rc",
        action="store_true",
        help="Do not modify rc file; only write completion script",
    )
    install.set_defaults(handler=run_install_zsh)

    # Internal endpoint invoked by the zsh widget; hidden from --help.
    hidden = sub.add_parser("_zsh_complete", help=argparse.SUPPRESS)
    hidden.add_argument("--cword", type=int, required=True, help=argparse.SUPPRESS)
    hidden.add_argument("words", nargs="*", help=argparse.SUPPRESS)
    hidden.set_defaults(handler=run_zsh_complete)

    parser.set_defaults(handler=lambda _ctx, args: parser.print_help())
def _canonical_command(command: str) -> str:
alias_map = {
"dot": "dotfiles",
"setup": "bootstrap",
"provision": "bootstrap",
"pkg": "package",
}
return alias_map.get(command, command)
def _safe_config():
    """Load the flow config, returning None on any failure (completion must not raise)."""
    try:
        cfg = load_config()
    except Exception:
        return None
    return cfg
def _safe_manifest():
    """Load the manifest, falling back to an empty mapping on any error."""
    try:
        manifest = load_manifest()
    except Exception:
        manifest = {}
    return manifest
def _list_targets() -> List[str]:
    """Return sorted unique `namespace@platform` strings from the config."""
    cfg = _safe_config()
    if cfg is None:
        return []
    combos = {f"{t.namespace}@{t.platform}" for t in cfg.targets}
    return sorted(combos)
def _list_namespaces() -> List[str]:
    """Return sorted unique target namespaces from the config."""
    cfg = _safe_config()
    return [] if cfg is None else sorted({t.namespace for t in cfg.targets})
def _list_platforms() -> List[str]:
    """Return the sorted union of built-in host-template platforms and config platforms."""
    platforms: Set[str] = set(HOST_TEMPLATES.keys())
    cfg = _safe_config()
    if cfg is not None:
        platforms.update(t.platform for t in cfg.targets)
    return sorted(platforms)
def _list_bootstrap_profiles() -> List[str]:
    """Return sorted bootstrap profile names from the manifest."""
    return sorted(_safe_manifest().get("profiles", {}).keys())
def _list_manifest_packages() -> List[str]:
    """Return sorted binary package names declared in the manifest."""
    return sorted(_safe_manifest().get("binaries", {}).keys())
def _list_installed_packages() -> List[str]:
    """Return sorted package names recorded in the installed-state file.

    Any problem (missing file, unreadable JSON, non-dict top level) yields
    an empty list — completion must never raise.
    """
    if not INSTALLED_STATE.exists():
        return []
    try:
        state = json.loads(INSTALLED_STATE.read_text())
    except Exception:
        return []
    return sorted(state.keys()) if isinstance(state, dict) else []
def _list_dotfiles_profiles() -> List[str]:
    """Return sorted profile directory names under dotfiles/profiles (dot-dirs excluded)."""
    root = DOTFILES_DIR / "profiles"
    if not root.is_dir():
        return []
    names = [
        entry.name
        for entry in root.iterdir()
        if entry.is_dir() and not entry.name.startswith(".")
    ]
    return sorted(names)
def _list_dotfiles_packages(profile: Optional[str] = None) -> List[str]:
    """Return sorted package directory names from common/ plus profile dirs.

    With a profile given, only that profile's directory is scanned in
    addition to common/; otherwise every profile directory contributes.
    Hidden (dot-prefixed) directories are ignored.
    """
    def _package_dirs(parent):
        # Yield visible subdirectory names; empty when parent is not a dir.
        if parent.is_dir():
            for entry in parent.iterdir():
                if entry.is_dir() and not entry.name.startswith("."):
                    yield entry.name

    names: Set[str] = set(_package_dirs(DOTFILES_DIR / "common"))
    profiles_root = DOTFILES_DIR / "profiles"
    if profile:
        names.update(_package_dirs(profiles_root / profile))
    elif profiles_root.is_dir():
        for profile_dir in profiles_root.iterdir():
            names.update(_package_dirs(profile_dir))
    return sorted(names)
def _list_container_names() -> List[str]:
    """Query docker/podman for dev container names (label dev=true).

    Returns [] when no runtime is on PATH, the query fails, or it exceeds
    the 1-second timeout — completion must stay fast and quiet.
    """
    runtime = next((rt for rt in ("docker", "podman") if shutil.which(rt)), None)
    if runtime is None:
        return []
    query = [
        runtime,
        "ps",
        "-a",
        "--filter",
        "label=dev=true",
        "--format",
        '{{.Label "dev.name"}}',
    ]
    try:
        proc = subprocess.run(query, capture_output=True, text=True, timeout=1)
    except Exception:
        return []
    if proc.returncode != 0:
        return []
    names = {line.strip() for line in proc.stdout.splitlines() if line.strip()}
    return sorted(names)
def _split_words(words: Sequence[str], cword: int):
tokens = list(words)
index = max(0, cword - 1)
if tokens:
tokens = tokens[1:]
index = max(0, cword - 2)
if index > len(tokens):
index = len(tokens)
current = tokens[index] if index < len(tokens) else ""
before = tokens[:index]
return before, current
def _filter(candidates: Sequence[str], prefix: str) -> List[str]:
unique = sorted(set(candidates))
if not prefix:
return unique
return [c for c in unique if c.startswith(prefix)]
def _profile_from_before(before: Sequence[str]) -> Optional[str]:
for i, token in enumerate(before):
if token == "--profile" and i + 1 < len(before):
return before[i + 1]
return None
def _complete_dev(before: Sequence[str], current: str) -> List[str]:
    """Complete `flow dev ...` subcommands, their options, and container names."""
    if len(before) <= 1:
        return _filter(["create", "exec", "connect", "list", "stop", "remove", "rm", "respawn"], current)
    # Treat the "rm" alias as "remove" for option lookup.
    sub = "remove" if before[1] == "rm" else before[1]
    option_map = {
        "remove": ["-f", "--force", "-h", "--help"],
        "stop": ["--kill", "-h", "--help"],
        "exec": ["-h", "--help"],
        "connect": ["-h", "--help"],
        "respawn": ["-h", "--help"],
    }
    if sub in option_map:
        if current.startswith("-"):
            return _filter(option_map[sub], current)
        # Only the first positional argument (the container name) completes.
        positionals = [tok for tok in before[2:] if not tok.startswith("-")]
        if not positionals:
            return _filter(_list_container_names(), current)
        return []
    if sub == "create":
        # `-i <TAB>` offers a few common image shorthands.
        if before[-1] in ("-i", "--image"):
            return _filter(["tm0/node", "docker/python", "docker/alpine"], current)
        if current.startswith("-"):
            return _filter(["-i", "--image", "-p", "--project", "-h", "--help"], current)
        return []
    return []
def _complete_dotfiles(before: Sequence[str], current: str) -> List[str]:
    """Complete `flow dotfiles ...` subcommands, options, profiles and packages."""
    if len(before) <= 1:
        return _filter(
            ["init", "link", "unlink", "status", "sync", "relink", "clean", "edit"],
            current,
        )
    sub = before[1]
    wants_option = current.startswith("-")
    if sub == "init":
        return _filter(["--repo", "-h", "--help"], current) if wants_option else []
    if sub in ("link", "relink"):
        # `--profile <TAB>` completes profile names first.
        if before[-1] == "--profile":
            return _filter(_list_dotfiles_profiles(), current)
        if wants_option:
            return _filter(["--profile", "--copy", "--force", "--dry-run", "-h", "--help"], current)
        # Package completion is scoped to the selected profile, if any.
        return _filter(_list_dotfiles_packages(_profile_from_before(before)), current)
    if sub == "unlink":
        if wants_option:
            return _filter(["-h", "--help"], current)
        return _filter(_list_dotfiles_packages(), current)
    if sub == "edit":
        if wants_option:
            return _filter(["--no-commit", "-h", "--help"], current)
        positionals = [tok for tok in before[2:] if not tok.startswith("-")]
        return _filter(_list_dotfiles_packages(), current) if not positionals else []
    if sub == "clean":
        return _filter(["--dry-run", "-h", "--help"], current) if wants_option else []
    return []
def _complete_bootstrap(before: Sequence[str], current: str) -> List[str]:
    """Complete `flow bootstrap ...` subcommands, options, and profile names."""
    if len(before) <= 1:
        return _filter(["run", "list", "show"], current)
    sub = before[1]
    if sub == "run":
        if before[-1] == "--profile":
            return _filter(_list_bootstrap_profiles(), current)
        if current.startswith("-"):
            return _filter(["--profile", "--dry-run", "--var", "-h", "--help"], current)
        return []
    if sub == "show":
        if current.startswith("-"):
            return _filter(["-h", "--help"], current)
        # The single positional is a profile name.
        positionals = [tok for tok in before[2:] if not tok.startswith("-")]
        return [] if positionals else _filter(_list_bootstrap_profiles(), current)
    return []
def _complete_package(before: Sequence[str], current: str) -> List[str]:
    """Complete `flow package ...` subcommands, options, and package names."""
    if len(before) <= 1:
        return _filter(["install", "list", "remove"], current)
    sub = before[1]
    wants_option = current.startswith("-")
    if sub == "install":
        if wants_option:
            return _filter(["--dry-run", "-h", "--help"], current)
        return _filter(_list_manifest_packages(), current)
    if sub == "remove":
        if wants_option:
            return _filter(["-h", "--help"], current)
        # Removal only makes sense for packages recorded as installed.
        return _filter(_list_installed_packages(), current)
    if sub == "list":
        return _filter(["--all", "-h", "--help"], current) if wants_option else []
    return []
def _complete_sync(before: Sequence[str], current: str) -> List[str]:
    """Complete `flow sync ...` subcommands and their options."""
    if len(before) <= 1:
        return _filter(["check", "fetch", "summary"], current)
    # Only `check` has extra flags; everything else just offers help.
    if before[1] == "check" and current.startswith("-"):
        return _filter(["--fetch", "--no-fetch", "-h", "--help"], current)
    if current.startswith("-"):
        return _filter(["-h", "--help"], current)
    return []
def complete(words: Sequence[str], cword: int) -> List[str]:
    """Top-level completion dispatch: route to the per-command completer."""
    before, current = _split_words(words, cword)
    if not before:
        return _filter(TOP_LEVEL_COMMANDS + ["-h", "--help", "-v", "--version"], current)
    command = _canonical_command(before[0])
    if command == "enter":
        # Flag-value completion comes first so `-p <TAB>` offers platforms.
        last = before[-1]
        if last in ("-p", "--platform"):
            return _filter(_list_platforms(), current)
        if last in ("-n", "--namespace"):
            return _filter(_list_namespaces(), current)
        if current.startswith("-"):
            return _filter(
                ["-u", "--user", "-n", "--namespace", "-p", "--platform", "-s", "--session", "--no-tmux", "-d", "--dry-run", "-h", "--help"],
                current,
            )
        return _filter(_list_targets(), current)
    dispatch = {
        "dev": _complete_dev,
        "dotfiles": _complete_dotfiles,
        "bootstrap": _complete_bootstrap,
        "package": _complete_package,
        "sync": _complete_sync,
    }
    handler = dispatch.get(command)
    if handler is not None:
        return handler(before, current)
    if command == "completion":
        if len(before) <= 1:
            return _filter(["zsh", "install-zsh"], current)
        if before[1] == "install-zsh" and current.startswith("-"):
            return _filter(["--dir", "--rc", "--no-rc", "-h", "--help"], current)
        return []
    return []
def run_zsh_complete(_ctx, args):
    """Handler for the hidden `_zsh_complete` endpoint: print one candidate per line."""
    for candidate in complete(args.words, args.cword):
        print(candidate)
def _zsh_script_text() -> str:
    """Return the zsh completion script installed as `_flow`.

    The script asks `flow completion _zsh_complete` for candidates and
    falls back to filename completion when the current word looks like a
    path; errors from the helper are silenced so completion never breaks
    the shell.
    """
    return r'''#compdef flow
_flow() {
local -a suggestions
suggestions=("${(@f)$(flow completion _zsh_complete --cword "$CURRENT" -- "${words[@]}" 2>/dev/null)}")
if (( ${#suggestions[@]} > 0 )); then
compadd -Q -- "${suggestions[@]}"
return 0
fi
if [[ "$words[CURRENT]" == */* || "$words[CURRENT]" == ./* || "$words[CURRENT]" == ~* ]]; then
_files
return 0
fi
return 1
}
compdef _flow flow
'''
def _zsh_dir_for_rc(path: Path) -> str:
home = Path.home().resolve()
resolved = path.expanduser().resolve()
try:
rel = resolved.relative_to(home)
return f"~/{rel}" if str(rel) != "." else "~"
except ValueError:
return str(resolved)
def _zsh_rc_snippet(completions_dir: Path) -> str:
    """Build the marker-delimited rc snippet adding *completions_dir* to fpath."""
    lines = [
        ZSH_RC_START,
        f"fpath=({_zsh_dir_for_rc(completions_dir)} $fpath)",
        "autoload -Uz compinit && compinit",
        ZSH_RC_END,
    ]
    return "\n".join(lines) + "\n"
def _ensure_rc_snippet(rc_path: Path, completions_dir: Path) -> bool:
    """Insert or refresh the flow completion snippet in *rc_path*.

    Returns True when the rc file was modified, False when it already
    contains an up-to-date snippet. Creates parent directories (and the
    file) as needed.
    """
    snippet = _zsh_rc_snippet(completions_dir)
    if rc_path.exists():
        content = rc_path.read_text()
    else:
        content = ""
    if ZSH_RC_START in content and ZSH_RC_END in content:
        # A complete snippet exists: replace only the marker-delimited
        # region, preserving everything around it (including the newline
        # that follows the end marker — hence rstrip on the new snippet).
        start = content.find(ZSH_RC_START)
        end = content.find(ZSH_RC_END, start)
        if end >= 0:
            end += len(ZSH_RC_END)
            updated = content[:start] + snippet.rstrip("\n") + content[end:]
            if updated == content:
                # Already up to date — avoid a pointless rewrite.
                return False
            rc_path.parent.mkdir(parents=True, exist_ok=True)
            rc_path.write_text(updated)
            return True
    # No (complete) snippet yet: append one, separated by a newline when
    # the existing content does not already end with one.
    sep = "" if content.endswith("\n") or content == "" else "\n"
    rc_path.parent.mkdir(parents=True, exist_ok=True)
    rc_path.write_text(content + sep + snippet)
    return True
def run_install_zsh(_ctx, args):
    """Write the `_flow` completion script and (unless --no-rc) wire up the rc file."""
    target_dir = Path(args.dir).expanduser()
    target_dir.mkdir(parents=True, exist_ok=True)
    script_path = target_dir / "_flow"
    script_path.write_text(_zsh_script_text())
    print(f"Installed completion script: {script_path}")
    if args.no_rc:
        print("Skipped rc file update (--no-rc)")
        return
    rc_path = Path(args.rc).expanduser()
    if _ensure_rc_snippet(rc_path, target_dir):
        print(f"Updated shell rc: {rc_path}")
    else:
        print(f"Shell rc already configured: {rc_path}")
    print("Restart shell or run: autoload -Uz compinit && compinit")
def run_zsh_script(_ctx, _args):
    """Handler for `flow completion zsh`: emit the completion script to stdout."""
    script = _zsh_script_text()
    print(script)

349
commands/container.py Normal file
View File

@@ -0,0 +1,349 @@
"""flow dev <subcommand> — container management."""
import os
import shutil
import subprocess
import sys
from flow.core.config import FlowContext
DEFAULT_REGISTRY = "registry.tomastm.com"
DEFAULT_TAG = "latest"
CONTAINER_HOME = "/home/dev"
def register(subparsers):
    """Attach the `dev` command and its container-management subcommands."""
    parser = subparsers.add_parser("dev", help="Manage development containers")
    sub = parser.add_subparsers(dest="dev_command")

    create = sub.add_parser("create", help="Create and start a development container")
    create.add_argument("name", help="Container name")
    create.add_argument("-i", "--image", required=True, help="Container image")
    create.add_argument("-p", "--project", help="Path to project directory")
    create.set_defaults(handler=run_create)

    exec_cmd = sub.add_parser("exec", help="Execute command in a container")
    exec_cmd.add_argument("name", help="Container name")
    exec_cmd.add_argument("cmd", nargs="*", help="Command to run (default: interactive shell)")
    exec_cmd.set_defaults(handler=run_exec)

    connect = sub.add_parser("connect", help="Attach to container tmux session")
    connect.add_argument("name", help="Container name")
    connect.set_defaults(handler=run_connect)

    listing = sub.add_parser("list", help="List development containers")
    listing.set_defaults(handler=run_list)

    stop = sub.add_parser("stop", help="Stop a development container")
    stop.add_argument("name", help="Container name")
    stop.add_argument("--kill", action="store_true", help="Kill instead of graceful stop")
    stop.set_defaults(handler=run_stop)

    remove = sub.add_parser("remove", aliases=["rm"], help="Remove a development container")
    remove.add_argument("name", help="Container name")
    remove.add_argument("-f", "--force", action="store_true", help="Force removal")
    remove.set_defaults(handler=run_remove)

    respawn = sub.add_parser("respawn", help="Respawn all tmux panes for a session")
    respawn.add_argument("name", help="Session/container name")
    respawn.set_defaults(handler=run_respawn)

    # Bare `flow dev` prints help instead of failing.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
def _runtime():
    """Return the first available container runtime binary name.

    Prefers docker over podman; raises RuntimeError when neither is on PATH.
    """
    for candidate in ("docker", "podman"):
        if shutil.which(candidate):
            return candidate
    raise RuntimeError("No container runtime found (docker or podman)")
def _cname(name: str) -> str:
"""Normalize to dev- prefix."""
return name if name.startswith("dev-") else f"dev-{name}"
def _parse_image_ref(
    image: str,
    *,
    default_registry: str = DEFAULT_REGISTRY,
    default_tag: str = DEFAULT_TAG,
):
    """Parse image shorthand into (full_ref, repo, tag, label).

    Shorthands:
      - "docker/NAME" -> docker.io/library/NAME (Docker Hub official images)
      - "tm0/NAME"    -> the default registry with repo NAME
      - "HOST/NAME"   -> explicit registry when HOST looks like one
                         (contains "." or ":" or equals "localhost")
    A ":tag" suffix on the last path component overrides default_tag. The
    label is a short "registry-word/basename" display form.
    """
    registry, tag = default_registry, default_tag
    if image.startswith("docker/"):
        registry = "docker.io"
        image = f"library/{image.split('/', 1)[1]}"
    elif image.startswith("tm0/"):
        registry = default_registry
        image = image.split("/", 1)[1]
    elif "/" in image:
        prefix, remainder = image.split("/", 1)
        if "." in prefix or ":" in prefix or prefix == "localhost":
            registry, image = prefix, remainder
    # Split a trailing tag off the last path component only, so a
    # registry port (host:5000/...) is never mistaken for a tag.
    if ":" in image.split("/")[-1]:
        image, _, tag = image.rpartition(":")
    repo = image
    full_ref = f"{registry}/{repo}:{tag}"
    if "." in registry:
        # "registry.tomastm.com" -> "tomastm", "docker.io" -> "docker"
        label_prefix = registry.rsplit(".", 1)[0].rsplit(".", 1)[-1]
    else:
        label_prefix = registry
    label = f"{label_prefix}/{repo.split('/')[-1]}"
    return full_ref, repo, tag, label
def _container_exists(rt: str, cname: str) -> bool:
    """True if a container named *cname* exists, whether running or stopped."""
    proc = subprocess.run(
        [rt, "container", "ls", "-a", "--format", "{{.Names}}"],
        capture_output=True, text=True,
    )
    names = proc.stdout.strip().split("\n")
    return cname in names
def _container_running(rt: str, cname: str) -> bool:
    """True if a container named *cname* is currently running."""
    proc = subprocess.run(
        [rt, "container", "ls", "--format", "{{.Names}}"],
        capture_output=True, text=True,
    )
    names = proc.stdout.strip().split("\n")
    return cname in names
def run_create(ctx: FlowContext, args):
    """Create and start a detached dev container running `sleep infinity`.

    Mounts the project directory (if given) at /workspace, plus the host
    docker socket and selected $HOME files when present. Exits with status
    1 if the name is already taken or the project path is invalid.
    """
    rt = _runtime()
    cname = _cname(args.name)
    if _container_exists(rt, cname):
        ctx.console.error(f"Container already exists: {cname}")
        sys.exit(1)
    project_path = os.path.realpath(args.project) if args.project else None
    if project_path and not os.path.isdir(project_path):
        ctx.console.error(f"Invalid project path: {project_path}")
        sys.exit(1)
    # Resolve image shorthand against the configured registry and tag.
    full_ref, _, _, _ = _parse_image_ref(
        args.image,
        default_registry=ctx.config.container_registry,
        default_tag=ctx.config.container_tag,
    )
    # Labels let `flow dev list` and completion find our containers later.
    cmd = [
        rt, "run", "-d",
        "--name", cname,
        "--label", "dev=true",
        "--label", f"dev.name={args.name}",
        "--label", f"dev.image_ref={full_ref}",
        "--network", "host",
        "--init",
    ]
    if project_path:
        cmd.extend(["-v", f"{project_path}:/workspace"])
        cmd.extend(["--label", f"dev.project_path={project_path}"])
    # Pass through the host docker socket when present (docker-in-docker).
    docker_sock = "/var/run/docker.sock"
    if os.path.exists(docker_sock):
        cmd.extend(["-v", f"{docker_sock}:{docker_sock}"])
    # Share SSH keys (read-only) and npm config/cache from the host home.
    home = os.path.expanduser("~")
    if os.path.isdir(f"{home}/.ssh"):
        cmd.extend(["-v", f"{home}/.ssh:{CONTAINER_HOME}/.ssh:ro"])
    if os.path.isfile(f"{home}/.npmrc"):
        cmd.extend(["-v", f"{home}/.npmrc:{CONTAINER_HOME}/.npmrc:ro"])
    if os.path.isdir(f"{home}/.npm"):
        cmd.extend(["-v", f"{home}/.npm:{CONTAINER_HOME}/.npm"])
    # Add docker group if available
    try:
        import grp
        docker_gid = str(grp.getgrnam("docker").gr_gid)
        cmd.extend(["--group-add", docker_gid])
    except (KeyError, ImportError):
        pass
    # Keep the container alive; actual work happens via `flow dev exec`.
    cmd.extend([full_ref, "sleep", "infinity"])
    subprocess.run(cmd, check=True)
    ctx.console.success(f"Created and started container: {cname}")
def run_exec(ctx: FlowContext, args):
    """Execute a command in a running dev container, or open an interactive shell.

    With an explicit command this process exits with that command's return
    code. Without one, zsh, bash and sh are tried in order as login shells.
    """
    rt = _runtime()
    cname = _cname(args.name)
    if not _container_running(rt, cname):
        ctx.console.error(f"Container {cname} not running")
        sys.exit(1)
    if args.cmd:
        exec_cmd = [rt, "exec"]
        # Allocate a TTY only when we actually have one.
        if sys.stdin.isatty():
            exec_cmd.extend(["-it"])
        exec_cmd.append(cname)
        exec_cmd.extend(args.cmd)
        result = subprocess.run(exec_cmd)
        sys.exit(result.returncode)
    # No command — try shells in order
    last_code = 0
    for shell in ("zsh -l", "bash -l", "sh"):
        parts = shell.split()
        # Custom detach keys avoid the default ctrl-p clashing with shell history.
        exec_cmd = [rt, "exec", "--detach-keys", "ctrl-q,ctrl-p", "-it", cname] + parts
        result = subprocess.run(exec_cmd)
        if result.returncode == 0:
            return
        last_code = result.returncode
    ctx.console.error(f"Unable to start an interactive shell in {cname}")
    sys.exit(last_code or 1)
def run_connect(ctx: FlowContext, args):
    """Attach to (or create) a tmux session bound to a dev container.

    Starts the container if stopped, falls back to a direct exec when tmux
    is unavailable, and finally replaces the current process with `tmux
    attach`/`switch-client` (os.execvp never returns).
    """
    rt = _runtime()
    cname = _cname(args.name)
    if not _container_exists(rt, cname):
        ctx.console.error(f"Container does not exist: {cname}")
        sys.exit(1)
    if not _container_running(rt, cname):
        subprocess.run([rt, "start", cname], capture_output=True)
    if not shutil.which("tmux"):
        ctx.console.warn("tmux not found; falling back to direct exec")
        args.cmd = []
        run_exec(ctx, args)
        return
    # Get image label for env
    result = subprocess.run(
        [rt, "container", "inspect", cname, "--format", "{{ .Config.Image }}"],
        capture_output=True, text=True,
    )
    image_ref = result.stdout.strip()
    _, _, _, image_label = _parse_image_ref(image_ref)
    # Create tmux session if needed
    check = subprocess.run(["tmux", "has-session", "-t", cname], capture_output=True)
    if check.returncode != 0:
        # Propagate flow environment hints into the new session.
        ns = os.environ.get("DF_NAMESPACE", "")
        plat = os.environ.get("DF_PLATFORM", "")
        subprocess.run([
            "tmux", "new-session", "-ds", cname,
            "-e", f"DF_IMAGE={image_label}",
            "-e", f"DF_NAMESPACE={ns}",
            "-e", f"DF_PLATFORM={plat}",
            f"flow dev exec {args.name}",
        ])
        # New windows/panes in this session should also land in the container.
        subprocess.run([
            "tmux", "set-option", "-t", cname,
            "default-command", f"flow dev exec {args.name}",
        ])
    # Inside tmux already: switch the client; otherwise attach.
    if os.environ.get("TMUX"):
        os.execvp("tmux", ["tmux", "switch-client", "-t", cname])
    else:
        os.execvp("tmux", ["tmux", "attach", "-t", cname])
def run_list(ctx: FlowContext, args):
    """List all dev containers (running or not) in a table.

    Queries the container runtime for containers labelled ``dev=true`` and
    renders NAME/IMAGE/PROJECT/STATUS rows, abbreviating $HOME as ``~`` in
    project paths.

    Fix: ``os.path.expanduser("~")`` was recomputed on every loop iteration;
    it is loop-invariant and is now hoisted.
    """
    rt = _runtime()
    result = subprocess.run(
        [rt, "ps", "-a", "--filter", "label=dev=true",
         "--format", '{{.Label "dev.name"}}|{{.Image}}|{{.Label "dev.project_path"}}|{{.Status}}'],
        capture_output=True, text=True,
    )
    # Hoisted out of the loop: $HOME does not change between rows.
    home = os.path.expanduser("~")
    headers = ["NAME", "IMAGE", "PROJECT", "STATUS"]
    rows = []
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        parts = line.split("|")
        if len(parts) >= 4:
            name, image, project, status = parts[0], parts[1], parts[2], parts[3]
            # Shorten paths for readability.
            if project.startswith(home):
                project = "~" + project[len(home):]
            rows.append([name, image, project, status])
    if not rows:
        ctx.console.info("No development containers found.")
        return
    ctx.console.table(headers, rows)
def run_stop(ctx: FlowContext, args):
    """Stop (or kill, with --kill) a dev container, then leave its tmux session."""
    runtime = _runtime()
    container = _cname(args.name)
    if not _container_exists(runtime, container):
        ctx.console.error(f"Container {container} does not exist")
        sys.exit(1)
    # Choose signal strength based on the --kill flag.
    if args.kill:
        verb, message = "kill", f"Killing container {container}..."
    else:
        verb, message = "stop", f"Stopping container {container}..."
    ctx.console.info(message)
    subprocess.run([runtime, verb, container], check=True)
    _tmux_fallback(container)
def run_remove(ctx: FlowContext, args):
    """Remove a dev container (optionally forced), then leave its tmux session."""
    runtime = _runtime()
    container = _cname(args.name)
    if not _container_exists(runtime, container):
        ctx.console.error(f"Container {container} does not exist")
        sys.exit(1)
    if args.force:
        ctx.console.info(f"Removing container {container} (force)...")
        rm_cmd = [runtime, "rm", "-f", container]
    else:
        ctx.console.info(f"Removing container {container}...")
        rm_cmd = [runtime, "rm", container]
    subprocess.run(rm_cmd, check=True)
    _tmux_fallback(container)
def run_respawn(ctx: FlowContext, args):
    """Respawn every pane of the container's tmux session."""
    session = _cname(args.name)
    listing = subprocess.run(
        ["tmux", "list-panes", "-t", session, "-s",
         "-F", "#{session_name}:#{window_index}.#{pane_index}"],
        capture_output=True, text=True,
    )
    panes = [p for p in listing.stdout.strip().split("\n") if p]
    for pane in panes:
        ctx.console.info(f"Respawning {pane}...")
        subprocess.run(["tmux", "respawn-pane", "-t", pane])
def _tmux_fallback(cname: str):
    """If inside tmux in the target session, switch to default.

    Keeps the tmux client out of a session whose backing container was just
    stopped or removed.
    """
    if not os.environ.get("TMUX"):
        return
    probe = subprocess.run(
        ["tmux", "display-message", "-p", "#S"],
        capture_output=True, text=True,
    )
    if probe.stdout.strip() != cname:
        return
    # Ensure the fallback session exists (no-op if it already does), then switch.
    subprocess.run(["tmux", "new-session", "-ds", "default"], capture_output=True)
    subprocess.run(["tmux", "switch-client", "-t", "default"])

425
commands/dotfiles.py Normal file
View File

@@ -0,0 +1,425 @@
"""flow dotfiles — dotfile management with GNU Stow-style symlinking."""
import json
import os
import shlex
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Optional
from flow.core.config import FlowContext
from flow.core.paths import DOTFILES_DIR, LINKED_STATE
from flow.core.stow import LinkTree, TreeFolder
def register(subparsers):
    """Wire up the `flow dotfiles` command tree on *subparsers*."""
    parser = subparsers.add_parser("dotfiles", aliases=["dot"], help="Manage dotfiles")
    commands = parser.add_subparsers(dest="dotfiles_command")
    # init
    p_init = commands.add_parser("init", help="Clone dotfiles repository")
    p_init.add_argument("--repo", help="Override repository URL")
    p_init.set_defaults(handler=run_init)
    # link
    p_link = commands.add_parser("link", help="Create symlinks for dotfile packages")
    p_link.add_argument("packages", nargs="*", help="Specific packages to link (default: all)")
    p_link.add_argument("--profile", help="Profile to use for overrides")
    p_link.add_argument("--copy", action="store_true", help="Copy instead of symlink")
    p_link.add_argument("--force", action="store_true", help="Overwrite existing files")
    p_link.add_argument("--dry-run", action="store_true", help="Show what would be done")
    p_link.set_defaults(handler=run_link)
    # unlink
    p_unlink = commands.add_parser("unlink", help="Remove dotfile symlinks")
    p_unlink.add_argument("packages", nargs="*", help="Specific packages to unlink (default: all)")
    p_unlink.set_defaults(handler=run_unlink)
    # status
    p_status = commands.add_parser("status", help="Show dotfiles link status")
    p_status.set_defaults(handler=run_status)
    # sync
    p_sync = commands.add_parser("sync", help="Pull latest dotfiles from remote")
    p_sync.set_defaults(handler=run_sync)
    # relink
    p_relink = commands.add_parser("relink", help="Refresh symlinks after changes")
    p_relink.add_argument("packages", nargs="*", help="Specific packages to relink (default: all)")
    p_relink.add_argument("--profile", help="Profile to use for overrides")
    p_relink.set_defaults(handler=run_relink)
    # clean
    p_clean = commands.add_parser("clean", help="Remove broken symlinks")
    p_clean.add_argument("--dry-run", action="store_true", help="Show what would be done")
    p_clean.set_defaults(handler=run_clean)
    # edit
    p_edit = commands.add_parser("edit", help="Edit package config with auto-commit")
    p_edit.add_argument("package", help="Package name to edit")
    p_edit.add_argument("--no-commit", action="store_true", help="Skip auto-commit")
    p_edit.set_defaults(handler=run_edit)
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
def _load_state() -> dict:
    """Read the linked-state JSON file; default to an empty link map."""
    if not LINKED_STATE.exists():
        return {"links": {}}
    return json.loads(LINKED_STATE.read_text())
def _save_state(state: dict):
    """Persist *state* as pretty-printed JSON, creating parent dirs as needed."""
    LINKED_STATE.parent.mkdir(parents=True, exist_ok=True)
    LINKED_STATE.write_text(json.dumps(state, indent=2))
def _discover_packages(dotfiles_dir: Path, profile: Optional[str] = None) -> dict:
"""Discover packages from common/ and optionally profiles/<name>/.
Returns {package_name: source_dir} with profile dirs taking precedence.
"""
packages = {}
common = dotfiles_dir / "common"
if common.is_dir():
for pkg in sorted(common.iterdir()):
if pkg.is_dir() and not pkg.name.startswith("."):
packages[pkg.name] = pkg
if profile:
profile_dir = dotfiles_dir / "profiles" / profile
if profile_dir.is_dir():
for pkg in sorted(profile_dir.iterdir()):
if pkg.is_dir() and not pkg.name.startswith("."):
packages[pkg.name] = pkg # Override common
return packages
def _walk_package(source_dir: Path, home: Path):
"""Yield (source_file, target_file) pairs for a package directory.
Files in the package directory map relative to $HOME.
"""
for root, _dirs, files in os.walk(source_dir):
for fname in files:
src = Path(root) / fname
rel = src.relative_to(source_dir)
dst = home / rel
yield src, dst
def run_init(ctx: FlowContext, args):
    """Clone the configured dotfiles repository into DOTFILES_DIR."""
    repo_url = args.repo or ctx.config.dotfiles_url
    if not repo_url:
        ctx.console.error("No dotfiles repository URL. Set it in config or pass --repo.")
        sys.exit(1)
    if DOTFILES_DIR.exists():
        # Already cloned — nothing to do.
        ctx.console.warn(f"Dotfiles directory already exists: {DOTFILES_DIR}")
        return
    DOTFILES_DIR.parent.mkdir(parents=True, exist_ok=True)
    branch = ctx.config.dotfiles_branch
    ctx.console.info(f"Cloning {repo_url} (branch: {branch})...")
    clone_cmd = ["git", "clone", "-b", branch, repo_url, str(DOTFILES_DIR)]
    subprocess.run(clone_cmd, check=True)
    ctx.console.success(f"Dotfiles cloned to {DOTFILES_DIR}")
def run_link(ctx: FlowContext, args):
    """Create symlinks (or copies, with --copy) for the selected dotfile packages.

    Two-phase plan/execute: all link operations are planned first via the
    stow-style TreeFolder, conflicts are detected across the whole plan, and
    only then are operations executed and the link state persisted.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)
    home = Path.home()
    packages = _discover_packages(DOTFILES_DIR, args.profile)
    # Filter to requested packages
    if args.packages:
        packages = {k: v for k, v in packages.items() if k in args.packages}
        missing = set(args.packages) - set(packages.keys())
        if missing:
            ctx.console.warn(f"Packages not found: {', '.join(missing)}")
    if not packages:
        ctx.console.error("No valid packages selected")
        sys.exit(1)
    # Build current link tree from state
    state = _load_state()
    try:
        tree = LinkTree.from_state(state)
    except RuntimeError as e:
        ctx.console.error(str(e))
        sys.exit(1)
    folder = TreeFolder(tree)
    all_operations = []
    copied_count = 0
    for pkg_name, source_dir in packages.items():
        ctx.console.info(f"[{pkg_name}]")
        for src, dst in _walk_package(source_dir, home):
            if args.copy:
                # --copy bypasses the stow planner entirely: files are copied
                # immediately (or only printed in dry-run) and never recorded
                # in the link state.
                if dst.exists() or dst.is_symlink():
                    if not args.force:
                        ctx.console.warn(f" Skipped (exists): {dst}")
                        continue
                    if dst.is_dir() and not dst.is_symlink():
                        ctx.console.error(f"Cannot overwrite directory with --copy: {dst}")
                        continue
                    if not args.dry_run:
                        dst.unlink()
                if args.dry_run:
                    print(f" COPY: {src} -> {dst}")
                else:
                    dst.parent.mkdir(parents=True, exist_ok=True)
                    shutil.copy2(src, dst)
                    print(f" Copied: {src} -> {dst}")
                    copied_count += 1
                continue
            ops = folder.plan_link(src, dst, pkg_name)
            all_operations.extend(ops)
    if args.copy:
        if args.dry_run:
            return
        ctx.console.success(f"Copied {copied_count} item(s)")
        return
    # Conflict detection (two-phase): inspect the whole plan before touching disk.
    conflicts = folder.detect_conflicts(all_operations)
    if conflicts and not args.force:
        for conflict in conflicts:
            ctx.console.error(conflict)
        ctx.console.error("\nUse --force to overwrite existing files")
        sys.exit(1)
    # Handle force mode: remove conflicting targets before executing the plan.
    if args.force and not args.dry_run:
        for op in all_operations:
            if op.type != "create_symlink":
                continue
            if not (op.target.exists() or op.target.is_symlink()):
                continue
            # Targets already owned by our link tree are handled by the planner.
            if op.target in tree.links:
                continue
            if op.target.is_dir() and not op.target.is_symlink():
                ctx.console.error(f"Cannot overwrite directory with --force: {op.target}")
                sys.exit(1)
            op.target.unlink()
    # Execute operations
    if args.dry_run:
        ctx.console.info("\nPlanned operations:")
        for op in all_operations:
            print(str(op))
    else:
        folder.execute_operations(all_operations, dry_run=False)
        # Persist the updated link tree so unlink/status/clean can find it.
        state = folder.to_state()
        _save_state(state)
        ctx.console.success(f"Linked {len(all_operations)} item(s)")
def run_unlink(ctx: FlowContext, args):
    """Remove recorded symlinks for the selected packages (default: all)."""
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles found.")
        return
    selected = args.packages if args.packages else list(links_by_package.keys())
    removed = 0
    for pkg_name in selected:
        pkg_links = links_by_package.get(pkg_name, {})
        if not pkg_links:
            continue
        ctx.console.info(f"[{pkg_name}]")
        for target_str in list(pkg_links):
            target = Path(target_str)
            if target.is_symlink():
                target.unlink()
                print(f" Removed: {target}")
                removed += 1
            elif target.exists():
                # A real file took the symlink's place — leave it alone.
                ctx.console.warn(f" Not a symlink, skipping: {target}")
            else:
                print(f" Already gone: {target}")
        links_by_package.pop(pkg_name, None)
    _save_state(state)
    ctx.console.success(f"Removed {removed} symlink(s)")
def run_status(ctx: FlowContext, args):
    """Report the health of every recorded symlink (OK/FOLDED/CHANGED/BROKEN)."""
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles.")
        return
    for pkg_name, links in links_by_package.items():
        ctx.console.info(f"[{pkg_name}]")
        for dst_str, link_info in links.items():
            dst = Path(dst_str)
            if not isinstance(link_info, dict) or "source" not in link_info:
                ctx.console.error(
                    "Unsupported linked state format. Remove linked.json and relink dotfiles."
                )
                sys.exit(1)
            src_str = link_info["source"]
            # Directory "folds" (whole-directory symlinks) get their own label.
            link_type = "FOLDED" if link_info.get("is_directory_link", False) else "OK"
            if dst.is_symlink():
                actual = os.readlink(dst)
                matches = actual == src_str or str(dst.resolve()) == str(Path(src_str).resolve())
                if matches:
                    print(f" {link_type}: {dst} -> {src_str}")
                else:
                    print(f" CHANGED: {dst} -> {actual} (expected {src_str})")
            elif dst.exists():
                print(f" NOT SYMLINK: {dst}")
            else:
                print(f" BROKEN: {dst} (missing)")
def run_sync(ctx: FlowContext, args):
    """Rebase-pull the dotfiles repository and report the result."""
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)
    ctx.console.info("Pulling latest dotfiles...")
    pull = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
        capture_output=True, text=True,
    )
    if pull.returncode != 0:
        ctx.console.error(f"Git pull failed: {pull.stderr.strip()}")
        sys.exit(1)
    output = pull.stdout.strip()
    if output:
        print(output)
    ctx.console.success("Dotfiles synced.")
def run_relink(ctx: FlowContext, args):
    """Refresh symlinks after changes (unlink + link).

    Fix: the `relink` subparser only defines `packages` and `--profile`, but
    the delegated run_link() also reads `args.copy`, `args.force`, and
    `args.dry_run`.  Passing the namespace through unchanged raised
    AttributeError; default any missing flags to False first.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)
    # The relink CLI does not expose these link options; fill them in so the
    # delegated handlers see a complete namespace.
    for flag in ("copy", "force", "dry_run"):
        if not hasattr(args, flag):
            setattr(args, flag, False)
    # First unlink
    ctx.console.info("Unlinking current symlinks...")
    run_unlink(ctx, args)
    # Then link again
    ctx.console.info("Relinking with updated configuration...")
    run_link(ctx, args)
def run_clean(ctx: FlowContext, args):
    """Remove broken symlinks recorded in the linked state.

    Fixes: the summary previously claimed "Cleaned N broken symlink(s)" even
    under --dry-run when nothing was removed, and dry-run also dropped the
    entries from the in-memory state (harmless only because it was not saved).
    """
    state = _load_state()
    if not state.get("links"):
        ctx.console.info("No linked dotfiles found.")
        return
    removed = 0
    for pkg_name, links in state["links"].items():
        for dst_str in list(links.keys()):
            dst = Path(dst_str)
            # Broken symlink: is_symlink() is True but exists() — which
            # follows the link — is False.
            if dst.is_symlink() and not dst.exists():
                if args.dry_run:
                    print(f"Would remove broken symlink: {dst}")
                else:
                    dst.unlink()
                    print(f"Removed broken symlink: {dst}")
                    # Only drop the state entry when the link was really removed.
                    del links[dst_str]
                removed += 1
    if not args.dry_run:
        _save_state(state)
    if removed == 0:
        ctx.console.info("No broken symlinks found")
    elif args.dry_run:
        ctx.console.info(f"Would clean {removed} broken symlink(s)")
    else:
        ctx.console.success(f"Cleaned {removed} broken symlink(s)")
def run_edit(ctx: FlowContext, args):
    """Edit package config with auto-commit workflow.

    Pull → open $EDITOR on the package directory → commit any changes →
    optionally push.  Fix: the push prompt only recognised a bare "n";
    answering "no" (or " N ") previously pushed anyway.  Answers are now
    stripped, lowercased, and matched against both "n" and "no".
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)
    package_name = args.package
    # Find package directory: common/ wins, otherwise the first profile match.
    common_dir = DOTFILES_DIR / "common" / package_name
    profile_dirs = list((DOTFILES_DIR / "profiles").glob(f"*/{package_name}"))
    if common_dir.exists():
        package_dir = common_dir
    elif profile_dirs:
        package_dir = profile_dirs[0]
    else:
        ctx.console.error(f"Package not found: {package_name}")
        sys.exit(1)
    # Git pull before editing (non-fatal so offline edits still work).
    ctx.console.info("Pulling latest changes...")
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        ctx.console.warn(f"Git pull failed: {result.stderr.strip()}")
    # Open editor ($EDITOR may contain arguments, hence shlex.split).
    editor = os.environ.get("EDITOR", "vim")
    ctx.console.info(f"Opening {package_dir} in {editor}...")
    edit_result = subprocess.run(shlex.split(editor) + [str(package_dir)])
    if edit_result.returncode != 0:
        ctx.console.warn(f"Editor exited with status {edit_result.returncode}")
    # Check for changes
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "status", "--porcelain"],
        capture_output=True, text=True,
    )
    has_changes = bool(result.stdout.strip())
    if has_changes and not args.no_commit:
        # Auto-commit changes
        ctx.console.info("Changes detected, committing...")
        subprocess.run(["git", "-C", str(DOTFILES_DIR), "add", "."], check=True)
        subprocess.run(
            ["git", "-C", str(DOTFILES_DIR), "commit", "-m", f"Update {package_name} config"],
            check=True,
        )
        # Ask before pushing; an empty answer defaults to yes.
        response = input("Push changes to remote? [Y/n] ")
        if response.strip().lower() not in ("n", "no"):
            subprocess.run(["git", "-C", str(DOTFILES_DIR), "push"], check=True)
            ctx.console.success("Changes committed and pushed")
        else:
            ctx.console.info("Changes committed locally (not pushed)")
    elif has_changes:
        ctx.console.info("Changes detected; skipped commit (--no-commit)")
    else:
        ctx.console.info("No changes to commit")

127
commands/enter.py Normal file
View File

@@ -0,0 +1,127 @@
"""flow enter — connect to a development instance via SSH."""
import getpass
import os
import sys
from flow.core.config import FlowContext
# Default host templates per platform.
# "<namespace>" is substituted with the target namespace when building the
# ssh host; a matching entry in ctx.config.targets overrides these defaults.
HOST_TEMPLATES = {
    "orb": "<namespace>.orb",
    "utm": "<namespace>.utm.local",
    "core": "<namespace>.core.lan",
}
def register(subparsers):
    """Wire up the `flow enter` command on *subparsers*."""
    parser = subparsers.add_parser("enter", help="Connect to a development instance via SSH")
    parser.add_argument("target", help="Target: [user@]namespace@platform")
    parser.add_argument("-u", "--user", help="SSH user (overrides target)")
    parser.add_argument("-n", "--namespace", help="Namespace (overrides target)")
    parser.add_argument("-p", "--platform", help="Platform (overrides target)")
    parser.add_argument("-s", "--session", default="default", help="Tmux session name (default: 'default')")
    parser.add_argument("--no-tmux", action="store_true", help="Skip tmux attachment")
    parser.add_argument("-d", "--dry-run", action="store_true", help="Show command without executing")
    parser.set_defaults(handler=run)
def _parse_target(target: str):
"""Parse [user@]namespace@platform into (user, namespace, platform)."""
user = None
namespace = None
platform = None
if "@" in target:
platform = target.rsplit("@", 1)[1]
rest = target.rsplit("@", 1)[0]
else:
rest = target
if "@" in rest:
user = rest.rsplit("@", 1)[0]
namespace = rest.rsplit("@", 1)[1]
else:
namespace = rest
return user, namespace, platform
def _build_destination(user: str, host: str, preserve_host_user: bool = False) -> str:
if "@" in host:
host_user, host_name = host.rsplit("@", 1)
effective_user = host_user if preserve_host_user else (user or host_user)
return f"{effective_user}@{host_name}"
if not user:
return host
return f"{user}@{host}"
def run(ctx: FlowContext, args):
    """Resolve the target triple, build the ssh command, and exec into it.

    The current process is replaced via os.execvp, so nothing after the exec
    runs (except in --dry-run mode, which only prints the command).
    """
    # Warn if already inside an instance — nested `enter` is refused.
    if os.environ.get("DF_NAMESPACE") and os.environ.get("DF_PLATFORM"):
        ns = os.environ["DF_NAMESPACE"]
        plat = os.environ["DF_PLATFORM"]
        ctx.console.error(
            f"Not recommended inside an instance. Currently in: {ns}@{plat}"
        )
        sys.exit(1)
    user, namespace, platform = _parse_target(args.target)
    # Apply overrides: the -u/-n/-p flags beat the positional target.
    if args.user:
        user = args.user
    if args.namespace:
        namespace = args.namespace
    if args.platform:
        platform = args.platform
    # Remember whether the user was chosen explicitly: a user we only fell
    # back to (the local login name) must not override a user embedded in a
    # configured ssh host.
    user_was_explicit = bool(user)
    if not user:
        user = os.environ.get("USER") or getpass.getuser()
    if not namespace:
        ctx.console.error("Namespace is required in target")
        sys.exit(1)
    if not platform:
        ctx.console.error("Platform is required in target")
        sys.exit(1)
    # Resolve SSH host from template or config
    host_template = HOST_TEMPLATES.get(platform)
    ssh_identity = None
    # Check config targets for override (config wins over built-in templates).
    for tc in ctx.config.targets:
        if tc.namespace == namespace and tc.platform == platform:
            host_template = tc.ssh_host
            ssh_identity = tc.ssh_identity
            break
    if not host_template:
        ctx.console.error(f"Unknown platform: {platform}")
        sys.exit(1)
    ssh_host = host_template.replace("<namespace>", namespace)
    destination = _build_destination(user, ssh_host, preserve_host_user=not user_was_explicit)
    # Build SSH command (-tt forces a tty so tmux can run on the far side).
    ssh_cmd = ["ssh", "-tt"]
    if ssh_identity:
        ssh_cmd.extend(["-i", os.path.expanduser(ssh_identity)])
    ssh_cmd.append(destination)
    if not args.no_tmux:
        # Attach-or-create the remote tmux session, exporting DF_* markers so
        # the remote shell knows which instance it is in.
        ssh_cmd.extend([
            "tmux", "new-session", "-As", args.session,
            "-e", f"DF_NAMESPACE={namespace}",
            "-e", f"DF_PLATFORM={platform}",
        ])
    if args.dry_run:
        ctx.console.info("Dry run command:")
        print(" " + " ".join(ssh_cmd))
        return
    os.execvp("ssh", ssh_cmd)

181
commands/package.py Normal file
View File

@@ -0,0 +1,181 @@
"""flow package — binary package management from manifest definitions."""
import json
import subprocess
import sys
from typing import Any, Dict, Optional, Tuple
from flow.core.config import FlowContext
from flow.core.paths import INSTALLED_STATE
from flow.core.variables import substitute_template
def register(subparsers):
    """Wire up the `flow package` command tree on *subparsers*."""
    parser = subparsers.add_parser("package", aliases=["pkg"], help="Manage binary packages")
    commands = parser.add_subparsers(dest="package_command")
    # install
    p_install = commands.add_parser("install", help="Install packages from manifest")
    p_install.add_argument("packages", nargs="+", help="Package names to install")
    p_install.add_argument("--dry-run", action="store_true", help="Show what would be done")
    p_install.set_defaults(handler=run_install)
    # list
    p_list = commands.add_parser("list", help="List installed and available packages")
    p_list.add_argument("--all", action="store_true", help="Show all available packages")
    p_list.set_defaults(handler=run_list)
    # remove
    p_remove = commands.add_parser("remove", help="Remove installed packages")
    p_remove.add_argument("packages", nargs="+", help="Package names to remove")
    p_remove.set_defaults(handler=run_remove)
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
def _load_installed() -> dict:
    """Read the installed-package state file; default to an empty mapping."""
    if not INSTALLED_STATE.exists():
        return {}
    return json.loads(INSTALLED_STATE.read_text())
def _save_installed(state: dict):
    """Persist installed-package state as pretty-printed JSON."""
    INSTALLED_STATE.parent.mkdir(parents=True, exist_ok=True)
    INSTALLED_STATE.write_text(json.dumps(state, indent=2))
def _get_definitions(ctx: FlowContext) -> dict:
    """Get package definitions from manifest (binaries section)."""
    definitions = ctx.manifest.get("binaries", {})
    return definitions
def _resolve_download_url(
    pkg_def: Dict[str, Any],
    platform_str: str,
) -> Optional[Tuple[str, Dict[str, str]]]:
    """Build a GitHub release download URL from a package definition.

    Returns (url, template_context) — the context also carries the resolved
    "downloadUrl" — or None when the source is not GitHub or no platform
    mapping exists for *platform_str*.
    """
    source = pkg_def.get("source", "")
    if not source.startswith("github:"):
        return None
    mapping = pkg_def.get("platform-map", {}).get(platform_str)
    if not mapping:
        return None
    owner_repo = source[len("github:"):]
    version = pkg_def.get("version", "")
    # Resolve the asset filename from the platform mapping plus the version.
    template_ctx = {**mapping, "version": version}
    asset = substitute_template(pkg_def.get("asset-pattern", ""), template_ctx)
    url = f"https://github.com/{owner_repo}/releases/download/v{version}/{asset}"
    template_ctx["downloadUrl"] = url
    return url, template_ctx
def run_install(ctx: FlowContext, args):
    """Install the named packages from the manifest's `binaries` definitions.

    For each package: resolve its GitHub release URL for the current
    platform, then run its `install-script` (a shell snippet with template
    variables, e.g. the download URL, substituted).  Successful installs are
    recorded in the installed-state file; any failure is remembered and the
    command exits 1 after processing every requested package.
    """
    definitions = _get_definitions(ctx)
    installed = _load_installed()
    platform_str = ctx.platform.platform
    had_error = False
    for pkg_name in args.packages:
        pkg_def = definitions.get(pkg_name)
        if not pkg_def:
            ctx.console.error(f"Package not found in manifest: {pkg_name}")
            had_error = True
            continue
        ctx.console.info(f"Installing {pkg_name} v{pkg_def.get('version', '?')}...")
        result = _resolve_download_url(pkg_def, platform_str)
        if not result:
            ctx.console.error(f"No download available for {pkg_name} on {platform_str}")
            had_error = True
            continue
        url, template_ctx = result
        if args.dry_run:
            # Dry run: describe what would happen, touch nothing.
            ctx.console.info(f"[{pkg_name}] Would download: {url}")
            install_script = pkg_def.get("install-script", "")
            if install_script:
                ctx.console.info(f"[{pkg_name}] Would run install script")
            continue
        # Run install script with template vars resolved
        install_script = pkg_def.get("install-script", "")
        if not install_script:
            ctx.console.error(f"Package '{pkg_name}' has no install-script")
            had_error = True
            continue
        resolved_script = substitute_template(install_script, template_ctx)
        ctx.console.info(f"Running install script for {pkg_name}...")
        # NOTE(review): shell=True executes a manifest-authored script; the
        # manifest is treated as trusted local configuration.
        proc = subprocess.run(
            resolved_script, shell=True,
            capture_output=False,
        )
        if proc.returncode != 0:
            ctx.console.error(f"Install script failed for {pkg_name}")
            had_error = True
            continue
        # Record the successful install so `list`/`remove` can see it.
        installed[pkg_name] = {
            "version": pkg_def.get("version", ""),
            "source": pkg_def.get("source", ""),
        }
        ctx.console.success(f"Installed {pkg_name} v{pkg_def.get('version', '')}")
    _save_installed(installed)
    if had_error:
        sys.exit(1)
def run_list(ctx: FlowContext, args):
    """Tabulate packages: installed ones by default, all defined with --all."""
    definitions = _get_definitions(ctx)
    installed = _load_installed()
    headers = ["PACKAGE", "INSTALLED", "AVAILABLE"]
    if args.all:
        # Show every package defined in the manifest.
        if not definitions:
            ctx.console.info("No packages defined in manifest.")
            return
        rows = [
            [name, installed.get(name, {}).get("version", "-"), pkg_def.get("version", "?")]
            for name, pkg_def in sorted(definitions.items())
        ]
    else:
        # Show installed packages only.
        if not installed:
            ctx.console.info("No packages installed.")
            return
        rows = [
            [name, info.get("version", "?"), definitions.get(name, {}).get("version", "?")]
            for name, info in sorted(installed.items())
        ]
    ctx.console.table(headers, rows)
def run_remove(ctx: FlowContext, args):
    """Drop packages from the installed-state file (files stay on disk).

    Fix: the "binary files were not automatically deleted" note was emitted
    once per removed package inside the loop; it is now printed a single
    time, and only when something was actually removed.
    """
    installed = _load_installed()
    removed_any = False
    for pkg_name in args.packages:
        if pkg_name not in installed:
            ctx.console.warn(f"Package not installed: {pkg_name}")
            continue
        # Remove from installed state
        del installed[pkg_name]
        removed_any = True
        ctx.console.success(f"Removed {pkg_name} from installed packages")
    if removed_any:
        ctx.console.warn("Note: binary files were not automatically deleted. Remove manually if needed.")
    _save_installed(installed)

199
commands/sync.py Normal file
View File

@@ -0,0 +1,199 @@
"""flow sync — check git sync status of all projects."""
import os
import subprocess
import sys
from flow.core.config import FlowContext
def register(subparsers):
    """Wire up the `flow sync` command tree on *subparsers*."""
    parser = subparsers.add_parser("sync", help="Git sync tools for projects")
    commands = parser.add_subparsers(dest="sync_command")
    p_check = commands.add_parser("check", help="Check all projects status")
    # --fetch / --no-fetch share one destination; fetching is the default.
    p_check.add_argument(
        "--fetch",
        dest="fetch",
        action="store_true",
        help="Run git fetch before checking (default)",
    )
    p_check.add_argument(
        "--no-fetch",
        dest="fetch",
        action="store_false",
        help="Skip git fetch",
    )
    p_check.set_defaults(fetch=True, handler=run_check)
    p_fetch = commands.add_parser("fetch", help="Fetch all project remotes")
    p_fetch.set_defaults(handler=run_fetch)
    p_summary = commands.add_parser("summary", help="Quick overview of project status")
    p_summary.set_defaults(handler=run_summary)
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
def _git(repo: str, *cmd, capture: bool = True) -> subprocess.CompletedProcess:
    """Run a git command inside *repo*, capturing text output by default."""
    argv = ["git", "-C", repo, *cmd]
    return subprocess.run(argv, capture_output=capture, text=True)
def _check_repo(repo_path: str, do_fetch: bool = True):
    """Check a single repo, return (name, issues list).

    Returns (name, None) when *repo_path* is not a git repository; an empty
    issues list means clean and fully pushed.  Checks performed: optional
    fetch, uncommitted/untracked work, unpushed commits on the current
    branch, and the ahead/upstream status of every other local branch.
    """
    name = os.path.basename(repo_path)
    git_dir = os.path.join(repo_path, ".git")
    if not os.path.isdir(git_dir):
        return name, None  # Not a git repo
    issues = []
    if do_fetch:
        fetch_result = _git(repo_path, "fetch", "--all", "--quiet")
        if fetch_result.returncode != 0:
            issues.append("git fetch failed")
    # Current branch
    result = _git(repo_path, "rev-parse", "--abbrev-ref", "HEAD")
    branch = result.stdout.strip() if result.returncode == 0 else "HEAD"
    # Uncommitted changes in working tree or index: `git diff --quiet`
    # exits non-zero when differences exist.
    diff_result = _git(repo_path, "diff", "--quiet")
    cached_result = _git(repo_path, "diff", "--cached", "--quiet")
    if diff_result.returncode != 0 or cached_result.returncode != 0:
        issues.append("uncommitted changes")
    else:
        # Only look for untracked files when the tree is otherwise clean.
        untracked = _git(repo_path, "ls-files", "--others", "--exclude-standard")
        if untracked.stdout.strip():
            issues.append("untracked files")
    # Unpushed commits on the current branch (requires a configured upstream).
    upstream_check = _git(repo_path, "rev-parse", "--abbrev-ref", f"{branch}@{{u}}")
    if upstream_check.returncode == 0:
        unpushed = _git(repo_path, "rev-list", "--oneline", f"{branch}@{{u}}..{branch}")
        if unpushed.stdout.strip():
            count = len(unpushed.stdout.strip().split("\n"))
            issues.append(f"{count} unpushed commit(s) on {branch}")
    else:
        issues.append(f"no upstream for {branch}")
    # Other local branches: flag ones ahead of (or missing) their upstream.
    branches_result = _git(repo_path, "for-each-ref", "--format=%(refname:short)", "refs/heads")
    for b in branches_result.stdout.strip().split("\n"):
        if not b or b == branch:
            continue
        up = _git(repo_path, "rev-parse", "--abbrev-ref", f"{b}@{{u}}")
        if up.returncode == 0:
            ahead = _git(repo_path, "rev-list", "--count", f"{b}@{{u}}..{b}")
            if ahead.stdout.strip() != "0":
                issues.append(f"branch {b}: {ahead.stdout.strip()} ahead")
        else:
            issues.append(f"branch {b}: no upstream")
    return name, issues
def run_check(ctx: FlowContext, args):
    """Check every repo under the projects directory and print a status table."""
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)
    rows = []
    needs_action = []
    not_git = []
    checked = 0
    for entry in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, entry)
        if not os.path.isdir(repo_path):
            continue
        name, issues = _check_repo(repo_path, do_fetch=args.fetch)
        if issues is None:
            # Plain directory, not a repository.
            not_git.append(name)
            continue
        checked += 1
        if issues:
            needs_action.append(name)
        rows.append([name, "; ".join(issues) if issues else "clean and synced"])
    skipped_msg = f"Skipped non-git directories: {', '.join(sorted(not_git))}" if not_git else None
    if checked == 0:
        ctx.console.info("No git repositories found in projects directory.")
        if skipped_msg:
            ctx.console.info(skipped_msg)
        return
    ctx.console.table(["PROJECT", "STATUS"], rows)
    if needs_action:
        ctx.console.warn(f"Projects needing action: {', '.join(sorted(needs_action))}")
    else:
        ctx.console.success("All repositories clean and synced.")
    if skipped_msg:
        ctx.console.info(skipped_msg)
def run_fetch(ctx: FlowContext, args):
    """Run `git fetch --all` in every project repository."""
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)
    failures = 0
    fetched = 0
    for entry in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, entry)
        # Only descend into actual git repositories.
        if not os.path.isdir(os.path.join(repo_path, ".git")):
            continue
        ctx.console.info(f"Fetching {entry}...")
        fetched += 1
        if _git(repo_path, "fetch", "--all", "--quiet").returncode != 0:
            ctx.console.error(f"Failed to fetch {entry}")
            failures += 1
    if fetched == 0:
        ctx.console.info("No git repositories found in projects directory.")
        return
    if failures:
        sys.exit(1)
    ctx.console.success("All remotes fetched.")
def run_summary(ctx: FlowContext, args):
    """Quick per-project status table, without fetching remotes."""
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)
    rows = []
    for entry in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, entry)
        if not os.path.isdir(repo_path):
            continue
        name, issues = _check_repo(repo_path, do_fetch=False)
        if issues is None:
            status = "not a git repo"
        elif issues:
            status = "; ".join(issues)
        else:
            status = "clean"
        rows.append([name, status])
    if not rows:
        ctx.console.info("No projects found.")
        return
    ctx.console.table(["PROJECT", "STATUS"], rows)