flow
This commit is contained in:
31
Makefile
Normal file
31
Makefile
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Build and install the standalone `flow` binary via PyInstaller.
# Tunables (override on the command line, e.g. `make PYTHON=python3.12`):
PYTHON ?= python3
PROJECT_ROOT := $(abspath ..)
ENTRYPOINT := __main__.py
DIST_DIR := dist
BUILD_DIR := build
SPEC_FILE := flow.spec
BINARY := $(DIST_DIR)/flow
INSTALL_DIR ?= $(HOME)/.local/bin

# Bare `make` prints usage instead of kicking off a PyInstaller build.
.DEFAULT_GOAL := help
# Remove half-written targets when a recipe fails so an interrupted
# build never leaves artifacts that look up to date.
.DELETE_ON_ERROR:

.PHONY: build install-local check-binary clean help

help:
	@printf "Targets:\n"
	@printf "  make build          Build standalone binary at dist/flow\n"
	@printf "  make install-local  Install binary to ~/.local/bin/flow\n"
	@printf "  make check-binary   Run dist/flow --help\n"
	@printf "  make clean          Remove build artifacts\n"

# One-file binary; --paths lets PyInstaller resolve the `flow` package
# imported by __main__.py from the parent directory.
build:
	$(PYTHON) -m PyInstaller --noconfirm --clean --onefile --name flow --paths "$(PROJECT_ROOT)" "$(ENTRYPOINT)"

install-local: build
	mkdir -p "$(INSTALL_DIR)"
	install -m 755 "$(BINARY)" "$(INSTALL_DIR)/flow"
	@printf "Installed flow to $(INSTALL_DIR)/flow\n"

# Smoke test: the built binary must at least print its help.
check-binary:
	"./$(BINARY)" --help

clean:
	rm -rf "$(BUILD_DIR)" "$(DIST_DIR)" "$(SPEC_FILE)"
|
||||||
257
README.md
Normal file
257
README.md
Normal file
@@ -0,0 +1,257 @@
|
|||||||
|
# flow
|
||||||
|
|
||||||
|
`flow` is a CLI for managing development instances, containers, dotfiles,
|
||||||
|
bootstrap profiles, and binary packages.
|
||||||
|
|
||||||
|
This repository contains the Python implementation of the tool and its command
|
||||||
|
modules.
|
||||||
|
|
||||||
|
## What is implemented
|
||||||
|
|
||||||
|
- Instance access via `flow enter`
|
||||||
|
- Container lifecycle commands under `flow dev` (`create`, `exec`, `connect`,
|
||||||
|
`list`, `stop`, `remove`, `respawn`)
|
||||||
|
- Dotfiles management (`dotfiles` / `dot`)
|
||||||
|
- Bootstrap planning and execution (`bootstrap` / `setup` / `provision`)
|
||||||
|
- Binary package installation from manifest definitions (`package` / `pkg`)
|
||||||
|
- Multi-repo sync checks (`sync`)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
Build and install a standalone binary (no pip install required for use):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make build
|
||||||
|
make install-local
|
||||||
|
```
|
||||||
|
|
||||||
|
This installs `flow` to `~/.local/bin/flow`.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
`flow` uses XDG paths by default:
|
||||||
|
|
||||||
|
- `~/.config/devflow/config`
|
||||||
|
- `~/.config/devflow/manifest.yaml`
|
||||||
|
- `~/.local/share/devflow/`
|
||||||
|
- `~/.local/state/devflow/`
|
||||||
|
|
||||||
|
### `config` (INI)
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[repository]
|
||||||
|
dotfiles_url = git@github.com:you/dotfiles.git
|
||||||
|
dotfiles_branch = main
|
||||||
|
|
||||||
|
[paths]
|
||||||
|
projects_dir = ~/projects
|
||||||
|
|
||||||
|
[defaults]
|
||||||
|
container_registry = registry.tomastm.com
|
||||||
|
container_tag = latest
|
||||||
|
tmux_session = default
|
||||||
|
|
||||||
|
[targets]
|
||||||
|
# Format A: namespace = platform ssh_host [ssh_identity]
|
||||||
|
personal = orb personal.orb
|
||||||
|
|
||||||
|
# Format B: namespace@platform = ssh_host [ssh_identity]
|
||||||
|
work@ec2 = work.internal ~/.ssh/id_work
|
||||||
|
```
|
||||||
|
|
||||||
|
## Manifest format
|
||||||
|
|
||||||
|
The manifest is YAML with two top-level sections used by the current code:
|
||||||
|
|
||||||
|
- `profiles` for bootstrap profiles
|
||||||
|
- `binaries` for package definitions
|
||||||
|
|
||||||
|
`environments` is no longer supported.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
profiles:
|
||||||
|
linux-vm:
|
||||||
|
os: linux
|
||||||
|
hostname: "$HOSTNAME"
|
||||||
|
shell: zsh
|
||||||
|
locale: en_US.UTF-8
|
||||||
|
requires: [HOSTNAME]
|
||||||
|
packages:
|
||||||
|
standard: [git, tmux, zsh]
|
||||||
|
binary: [neovim]
|
||||||
|
ssh_keygen:
|
||||||
|
- type: ed25519
|
||||||
|
comment: "$USER@$HOSTNAME"
|
||||||
|
runcmd:
|
||||||
|
- mkdir -p ~/projects
|
||||||
|
|
||||||
|
binaries:
|
||||||
|
neovim:
|
||||||
|
source: github:neovim/neovim
|
||||||
|
version: "0.10.4"
|
||||||
|
asset-pattern: "nvim-{{os}}-{{arch}}.tar.gz"
|
||||||
|
platform-map:
|
||||||
|
linux-amd64: { os: linux, arch: x86_64 }
|
||||||
|
linux-arm64: { os: linux, arch: arm64 }
|
||||||
|
macos-arm64: { os: macos, arch: arm64 }
|
||||||
|
install-script: |
|
||||||
|
curl -fL "{{downloadUrl}}" -o /tmp/nvim.tar.gz
|
||||||
|
tar -xzf /tmp/nvim.tar.gz -C /tmp
|
||||||
|
rm -rf ~/.local/bin/nvim
|
||||||
|
cp /tmp/nvim-*/bin/nvim ~/.local/bin/nvim
|
||||||
|
```
|
||||||
|
|
||||||
|
## Command overview
|
||||||
|
|
||||||
|
### Enter instances
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow enter personal@orb
|
||||||
|
flow enter root@personal@orb
|
||||||
|
flow enter personal@orb --dry-run
|
||||||
|
```
|
||||||
|
|
||||||
|
### Containers
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow dev create api -i tm0/node -p ~/projects/api
|
||||||
|
flow dev connect api
|
||||||
|
flow dev exec api -- npm test
|
||||||
|
flow dev list
|
||||||
|
flow dev stop api
|
||||||
|
flow dev remove api
|
||||||
|
```
|
||||||
|
|
||||||
|
### Dotfiles
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow dotfiles init --repo git@github.com:you/dotfiles.git
|
||||||
|
flow dotfiles link
|
||||||
|
flow dotfiles status
|
||||||
|
flow dotfiles relink
|
||||||
|
flow dotfiles clean --dry-run
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bootstrap
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow bootstrap list
|
||||||
|
flow bootstrap show linux-vm
|
||||||
|
flow bootstrap run --profile linux-vm --var HOSTNAME=devbox
|
||||||
|
flow bootstrap run --profile linux-vm --dry-run
|
||||||
|
```
|
||||||
|
|
||||||
|
### Packages
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow package install neovim
|
||||||
|
flow package list
|
||||||
|
flow package list --all
|
||||||
|
flow package remove neovim
|
||||||
|
```
|
||||||
|
|
||||||
|
### Sync
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow sync check
|
||||||
|
flow sync check --no-fetch
|
||||||
|
flow sync fetch
|
||||||
|
flow sync summary
|
||||||
|
```
|
||||||
|
|
||||||
|
### Completion
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow completion install-zsh
|
||||||
|
flow completion zsh
|
||||||
|
```
|
||||||
|
|
||||||
|
## Self-hosted config priority
|
||||||
|
|
||||||
|
When present, `flow` prefers config from a linked dotfiles package:
|
||||||
|
|
||||||
|
1. `~/.local/share/devflow/dotfiles/flow/.config/flow/config`
|
||||||
|
2. `~/.config/devflow/config`
|
||||||
|
|
||||||
|
And for manifest:
|
||||||
|
|
||||||
|
1. `~/.local/share/devflow/dotfiles/flow/.config/flow/manifest.yaml`
|
||||||
|
2. `~/.config/devflow/manifest.yaml`
|
||||||
|
|
||||||
|
Passing an explicit file path to internal loaders bypasses this cascade.
|
||||||
|
|
||||||
|
## State format policy
|
||||||
|
|
||||||
|
`flow` currently supports only the v2 dotfiles link state format
|
||||||
|
(`linked.json`). Older state formats are intentionally not supported.
|
||||||
|
|
||||||
|
## CLI behavior
|
||||||
|
|
||||||
|
- User errors return non-zero exit codes.
|
||||||
|
- External command failures are surfaced as concise one-line errors (no
|
||||||
|
traceback spam).
|
||||||
|
- `Ctrl+C` exits with code `130`.
|
||||||
|
|
||||||
|
## Zsh completion
|
||||||
|
|
||||||
|
Recommended one-shot install:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
flow completion install-zsh
|
||||||
|
```
|
||||||
|
|
||||||
|
Manual install (equivalent):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p ~/.zsh/completions
|
||||||
|
flow completion zsh > ~/.zsh/completions/_flow
|
||||||
|
```
|
||||||
|
|
||||||
|
Then ensure your `.zshrc` includes:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
fpath=(~/.zsh/completions $fpath)
|
||||||
|
autoload -Uz compinit && compinit
|
||||||
|
```
|
||||||
|
|
||||||
|
Completion is dynamic and pulls values from your current config/manifest/state
|
||||||
|
(for example bootstrap profiles, package names, dotfiles packages, and
|
||||||
|
configured `enter` targets).
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
Binary build (maintainers):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -m pip install pyinstaller
|
||||||
|
make build
|
||||||
|
make install-local
|
||||||
|
```
|
||||||
|
|
||||||
|
Useful targets:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make clean
|
||||||
|
```
|
||||||
|
|
||||||
|
Run a syntax check:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -m compileall .
|
||||||
|
```
|
||||||
|
|
||||||
|
Run tests (when `pytest` is available):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -m pytest
|
||||||
|
```
|
||||||
|
|
||||||
|
Optional local venv setup:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -m venv .venv
|
||||||
|
.venv/bin/pip install -U pip pytest pyyaml
|
||||||
|
PYTHONPATH=/path/to/src .venv/bin/pytest
|
||||||
|
```
|
||||||
1
__init__.py
Normal file
1
__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# Package version; surfaced to users via `flow --version` (cli.py builds
# the argparse version string from this value).
__version__ = "0.1.0"
|
||||||
4
__main__.py
Normal file
4
__main__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
"""Module entry point so the package runs via `python -m flow` (and as the
PyInstaller binary's entry script): delegates straight to the CLI."""
from flow.cli import main


if __name__ == "__main__":
    main()
|
||||||
BIN
__pycache__/__init__.cpython-313.pyc
Normal file
BIN
__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
__pycache__/__main__.cpython-313.pyc
Normal file
BIN
__pycache__/__main__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
__pycache__/cli.cpython-313.pyc
Normal file
BIN
__pycache__/cli.cpython-313.pyc
Normal file
Binary file not shown.
92
cli.py
Normal file
92
cli.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
"""CLI entry point — argparse routing and context creation."""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from flow import __version__
|
||||||
|
from flow.commands import bootstrap, completion, container, dotfiles, enter, package, sync
|
||||||
|
from flow.core.config import FlowContext, load_config, load_manifest
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
from flow.core.paths import ensure_dirs
|
||||||
|
from flow.core.platform import detect_platform
|
||||||
|
|
||||||
|
# Command modules wired into the CLI; each exposes register(subparsers),
# which attaches its subcommands and sets args.handler via set_defaults.
COMMAND_MODULES = [enter, container, dotfiles, bootstrap, package, sync, completion]
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: build the argparse tree, then dispatch to the
    selected subcommand's handler.

    Exit codes: 0 for help/success, 1 for startup or handler failures,
    130 on Ctrl+C, and the child's return code for subprocess failures.
    """
    parser = argparse.ArgumentParser(
        prog="flow",
        description="DevFlow - A unified toolkit for managing development instances, containers, and profiles",
    )
    parser.add_argument(
        "-v", "--version", action="version", version=f"flow {__version__}"
    )

    subparsers = parser.add_subparsers(dest="command")

    # Every command module contributes its own subparser(s) and sets
    # args.handler via set_defaults.
    for module in COMMAND_MODULES:
        module.register(subparsers)

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        sys.exit(0)

    # `completion` is handled before any directory/config/platform setup and
    # receives ctx=None, so it runs even when configuration is broken.
    if args.command == "completion":
        handler = getattr(args, "handler", None)
        if handler:
            handler(None, args)
            return
        parser.print_help()
        return

    ensure_dirs()
    console = ConsoleLogger()

    # Platform detection and config/manifest loading happen up front; any
    # failure here is reported as a single concise line, not a traceback.
    try:
        platform_info = detect_platform()
    except RuntimeError as e:
        console.error(str(e))
        sys.exit(1)

    try:
        config = load_config()
        manifest = load_manifest()
    except Exception as e:
        console.error(f"Failed to load configuration: {e}")
        sys.exit(1)

    # Shared context handed to every command handler.
    ctx = FlowContext(
        config=config,
        manifest=manifest,
        platform=platform_info,
        console=console,
    )

    handler = getattr(args, "handler", None)
    if handler:
        try:
            handler(ctx, args)
        except KeyboardInterrupt:
            # Conventional exit code for SIGINT.
            console.error("Interrupted")
            sys.exit(130)
        except subprocess.CalledProcessError as e:
            # Surface the last captured output line rather than a traceback.
            detail = (e.stderr or "").strip() or (e.stdout or "").strip()
            if detail:
                console.error(detail.splitlines()[-1])
            else:
                console.error(f"Command failed with exit code {e.returncode}")
            sys.exit(e.returncode or 1)
        except RuntimeError as e:
            console.error(str(e))
            sys.exit(1)
        except OSError as e:
            console.error(str(e))
            sys.exit(1)
        except Exception as e:
            console.error(f"Unexpected error: {e}")
            sys.exit(1)
    else:
        parser.print_help()
|
||||||
0
commands/__init__.py
Normal file
0
commands/__init__.py
Normal file
BIN
commands/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
commands/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/bootstrap.cpython-313.pyc
Normal file
BIN
commands/__pycache__/bootstrap.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/completion.cpython-313.pyc
Normal file
BIN
commands/__pycache__/completion.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/container.cpython-313.pyc
Normal file
BIN
commands/__pycache__/container.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/dotfiles.cpython-313.pyc
Normal file
BIN
commands/__pycache__/dotfiles.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/enter.cpython-313.pyc
Normal file
BIN
commands/__pycache__/enter.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/package.cpython-313.pyc
Normal file
BIN
commands/__pycache__/package.cpython-313.pyc
Normal file
Binary file not shown.
BIN
commands/__pycache__/sync.cpython-313.pyc
Normal file
BIN
commands/__pycache__/sync.cpython-313.pyc
Normal file
Binary file not shown.
418
commands/bootstrap.py
Normal file
418
commands/bootstrap.py
Normal file
@@ -0,0 +1,418 @@
|
|||||||
|
"""flow bootstrap — environment provisioning with plan-then-execute model."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
from flow.core.action import Action, ActionExecutor
|
||||||
|
from flow.core.config import FlowContext, load_manifest
|
||||||
|
from flow.core.paths import DOTFILES_DIR
|
||||||
|
from flow.core.process import run_command
|
||||||
|
from flow.core.variables import substitute
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Attach the bootstrap command tree (run / list / show) to the CLI."""
    parser = subparsers.add_parser(
        "bootstrap", aliases=["setup", "provision"],
        help="Environment provisioning",
    )
    commands = parser.add_subparsers(dest="bootstrap_command")

    run_parser = commands.add_parser("run", help="Run bootstrap actions")
    run_parser.add_argument("--profile", help="Profile name to use")
    run_parser.add_argument("--dry-run", action="store_true", help="Show plan without executing")
    run_parser.add_argument("--var", action="append", default=[], help="Set variable KEY=VALUE")
    run_parser.set_defaults(handler=run_bootstrap)

    list_parser = commands.add_parser("list", help="List available profiles")
    list_parser.set_defaults(handler=run_list)

    show_parser = commands.add_parser("show", help="Show profile configuration")
    show_parser.add_argument("profile", help="Profile name")
    show_parser.set_defaults(handler=run_show)

    # Bare `flow bootstrap` just prints usage for this subtree.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _get_profiles(ctx: FlowContext) -> dict:
    """Return the manifest's `profiles` mapping, or {} when absent.

    Raises RuntimeError for the retired `environments` key or for a
    `profiles` value that is not a mapping.
    """
    section = ctx.manifest.get("profiles")

    if section is None:
        # Fail loudly on the legacy key rather than silently ignoring it.
        if "environments" in ctx.manifest:
            raise RuntimeError(
                "Manifest key 'environments' is no longer supported. Rename it to 'profiles'."
            )
        return {}

    if not isinstance(section, dict):
        raise RuntimeError("Manifest key 'profiles' must be a mapping")

    return section
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_variables(var_args: list) -> dict:
|
||||||
|
variables = {}
|
||||||
|
for v in var_args:
|
||||||
|
if "=" not in v:
|
||||||
|
raise ValueError(f"Invalid --var value '{v}'. Expected KEY=VALUE")
|
||||||
|
key, value = v.split("=", 1)
|
||||||
|
if not key:
|
||||||
|
raise ValueError(f"Invalid --var value '{v}'. KEY cannot be empty")
|
||||||
|
variables[key] = value
|
||||||
|
return variables
|
||||||
|
|
||||||
|
|
||||||
|
def _plan_actions(ctx: FlowContext, profile_name: str, env_config: dict, variables: dict) -> List[Action]:
    """Plan all actions from a profile configuration.

    Translates the declarative profile mapping into an ordered list of
    Action objects; nothing is executed here. `variables` feeds `$VAR`
    substitution for hostname, ssh key comments, and runcmd entries.

    Fix: the generate-ssh-key description previously hard-coded the
    literal "(unknown)" and left the computed `filename` unused; it now
    interpolates the actual key filename.
    """
    actions = []

    # Required-variable checks run first so a missing --var fails early.
    for req_var in env_config.get("requires", []):
        actions.append(Action(
            type="check-variable",
            description=f"Check required variable: {req_var}",
            data={"variable": req_var, "variables": variables},
            skip_on_error=False,
        ))

    # Hostname (substituted now so the plan shows the resolved value).
    if "hostname" in env_config:
        hostname = substitute(env_config["hostname"], variables)
        actions.append(Action(
            type="set-hostname",
            description=f"Set hostname to: {hostname}",
            data={"hostname": hostname},
            skip_on_error=False,
        ))

    # Locale (linux-only; best effort).
    if "locale" in env_config:
        actions.append(Action(
            type="set-locale",
            description=f"Set locale to: {env_config['locale']}",
            data={"locale": env_config["locale"]},
            skip_on_error=True,
            os_filter="linux",
        ))

    # Login shell (linux-only; best effort).
    if "shell" in env_config:
        actions.append(Action(
            type="set-shell",
            description=f"Set shell to: {env_config['shell']}",
            data={"shell": env_config["shell"]},
            skip_on_error=True,
            os_filter="linux",
        ))

    # Packages: repo update, then standard / cask / binary installs.
    if "packages" in env_config:
        packages_config = env_config["packages"]
        pm = env_config.get("package-manager", "apt-get")

        # Refresh package repositories before any install.
        actions.append(Action(
            type="pm-update",
            description=f"Update {pm} package repositories",
            data={"pm": pm},
            skip_on_error=False,
        ))

        # Standard packages: entries may be plain names or {"name": ...}.
        standard = []
        for pkg in packages_config.get("standard", []) + packages_config.get("package", []):
            if isinstance(pkg, str):
                standard.append(pkg)
            else:
                standard.append(pkg["name"])

        if standard:
            actions.append(Action(
                type="install-packages",
                description=f"Install {len(standard)} packages via {pm}",
                data={"pm": pm, "packages": standard, "type": "standard"},
                skip_on_error=False,
            ))

        # Cask packages (macOS only).
        cask = []
        for pkg in packages_config.get("cask", []):
            if isinstance(pkg, str):
                cask.append(pkg)
            else:
                cask.append(pkg["name"])

        if cask:
            actions.append(Action(
                type="install-packages",
                description=f"Install {len(cask)} cask packages via {pm}",
                data={"pm": pm, "packages": cask, "type": "cask"},
                skip_on_error=False,
                os_filter="macos",
            ))

        # Binary packages resolved against the manifest's `binaries` section.
        binaries_manifest = ctx.manifest.get("binaries", {})
        for pkg in packages_config.get("binary", []):
            pkg_name = pkg if isinstance(pkg, str) else pkg["name"]
            binary_def = binaries_manifest.get(pkg_name, {})
            actions.append(Action(
                type="install-binary",
                description=f"Install binary: {pkg_name}",
                data={"name": pkg_name, "definition": binary_def, "spec": pkg if isinstance(pkg, dict) else {}},
                skip_on_error=True,
            ))

    # SSH key generation (best effort; existing keys are left alone at
    # execution time).
    for ssh_config in env_config.get("ssh_keygen", []):
        filename = ssh_config.get("filename", f"id_{ssh_config['type']}")
        actions.append(Action(
            type="generate-ssh-key",
            description=f"Generate SSH key: {filename}",
            data=ssh_config,
            skip_on_error=True,
        ))

    # Config linking.
    for config in env_config.get("configs", []):
        config_name = config if isinstance(config, str) else config["name"]
        actions.append(Action(
            type="link-config",
            description=f"Link configuration: {config_name}",
            data={"config_name": config_name},
            skip_on_error=True,
        ))

    # Custom shell commands, numbered from 1 for readable plan output.
    for i, command in enumerate(env_config.get("runcmd", []), 1):
        actions.append(Action(
            type="run-command",
            description=f"Run custom command {i}",
            data={"command": command},
            skip_on_error=True,
        ))

    return actions
|
||||||
|
|
||||||
|
|
||||||
|
def _register_handlers(executor: ActionExecutor, ctx: FlowContext, variables: dict):
    """Register all action type handlers.

    Each handler is a closure over `ctx` (console + platform) and
    `variables` (for $VAR substitution) and receives the Action's `data`
    dict. Handlers perform real system changes, mostly via sudo shell
    commands, so command strings and their order are the behavior.
    """

    def handle_check_variable(data):
        # Fails the run early when a profile-required --var was not supplied.
        var = data["variable"]
        if var not in data.get("variables", {}):
            raise RuntimeError(f"Required variable not set: {var}")

    def handle_set_hostname(data):
        # macOS needs all three scutil names set; linux uses hostnamectl.
        hostname = data["hostname"]
        if ctx.platform.os == "macos":
            run_command(f"sudo scutil --set ComputerName '{hostname}'", ctx.console)
            run_command(f"sudo scutil --set HostName '{hostname}'", ctx.console)
            run_command(f"sudo scutil --set LocalHostName '{hostname}'", ctx.console)
        else:
            run_command(f"sudo hostnamectl set-hostname '{hostname}'", ctx.console)

    def handle_set_locale(data):
        # Debian-style locale setup: generate, then make it the default LANG.
        locale = data["locale"]
        run_command(f"sudo locale-gen {locale}", ctx.console)
        run_command(f"sudo update-locale LANG={locale}", ctx.console)

    def handle_set_shell(data):
        # Resolve the shell binary, ensure it is listed in /etc/shells
        # (chsh rejects unlisted shells), then switch the login shell.
        shell = data["shell"]
        shell_path = shutil.which(shell)
        if not shell_path:
            raise RuntimeError(f"Shell not found: {shell}")
        try:
            with open("/etc/shells") as f:
                if shell_path not in f.read():
                    run_command(f"echo '{shell_path}' | sudo tee -a /etc/shells", ctx.console)
        except FileNotFoundError:
            # No /etc/shells on this system; let chsh decide.
            pass
        run_command(f"chsh -s {shell_path}", ctx.console)

    def handle_pm_update(data):
        # Known package managers get tailored update commands; anything
        # else falls back to `sudo <pm> update`.
        pm = data["pm"]
        commands = {
            "apt-get": "sudo apt-get update -qq",
            "apt": "sudo apt update -qq",
            "brew": "brew update",
        }
        cmd = commands.get(pm, f"sudo {pm} update")
        run_command(cmd, ctx.console)

    def handle_install_packages(data):
        # Builds one install command for the whole package list; brew
        # casks get --cask, apt variants get -y for non-interactive runs.
        pm = data["pm"]
        packages = data["packages"]
        pkg_type = data.get("type", "standard")
        pkg_str = " ".join(packages)

        if pm in ("apt-get", "apt"):
            cmd = f"sudo {pm} install -y {pkg_str}"
        elif pm == "brew" and pkg_type == "cask":
            cmd = f"brew install --cask {pkg_str}"
        elif pm == "brew":
            cmd = f"brew install {pkg_str}"
        else:
            cmd = f"sudo {pm} install {pkg_str}"

        run_command(cmd, ctx.console)

    def handle_install_binary(data):
        # Resolves a github: release asset for the current platform and
        # runs the definition's install-script with {{...}} placeholders
        # substituted (including the computed {{downloadUrl}}).
        from flow.core.variables import substitute_template
        pkg_name = data["name"]
        pkg_def = data["definition"]
        if not pkg_def:
            raise RuntimeError(f"No binary definition for: {pkg_name}")

        source = pkg_def.get("source", "")
        if not source.startswith("github:"):
            raise RuntimeError(f"Unsupported source: {source}")

        owner_repo = source[len("github:"):]
        version = pkg_def.get("version", "")
        asset_pattern = pkg_def.get("asset-pattern", "")
        platform_map = pkg_def.get("platform-map", {})
        mapping = platform_map.get(ctx.platform.platform)
        if not mapping:
            raise RuntimeError(f"No platform mapping for {ctx.platform.platform}")

        # NOTE(review): URL assumes release tags are prefixed with 'v' —
        # verify against the repos actually used in manifests.
        template_ctx = {**mapping, "version": version}
        asset = substitute_template(asset_pattern, template_ctx)
        url = f"https://github.com/{owner_repo}/releases/download/v{version}/{asset}"
        template_ctx["downloadUrl"] = url

        install_script = pkg_def.get("install-script", "")
        if install_script:
            resolved = substitute_template(install_script, template_ctx)
            run_command(resolved, ctx.console)

    def handle_generate_ssh_key(data):
        # Creates ~/.ssh/<filename> with an empty passphrase; skips (with
        # a warning) if the key file already exists.
        ssh_dir = Path.home() / ".ssh"
        ssh_dir.mkdir(mode=0o700, exist_ok=True)
        key_type = data["type"]
        comment = substitute(data.get("comment", ""), variables)
        filename = data.get("filename", f"id_{key_type}")
        key_path = ssh_dir / filename
        if key_path.exists():
            ctx.console.warn(f"SSH key already exists: {key_path}")
            return
        run_command(f'ssh-keygen -t {key_type} -f "{key_path}" -N "" -C "{comment}"', ctx.console)

    def handle_link_config(data):
        # Currently informational only; no linking is performed here.
        config_name = data["config_name"]
        ctx.console.info(f"Linking config: {config_name}")

    def handle_run_command(data):
        # Runs a profile runcmd entry after $VAR substitution.
        command = substitute(data["command"], variables)
        run_command(command, ctx.console)

    executor.register("check-variable", handle_check_variable)
    executor.register("set-hostname", handle_set_hostname)
    executor.register("set-locale", handle_set_locale)
    executor.register("set-shell", handle_set_shell)
    executor.register("pm-update", handle_pm_update)
    executor.register("install-packages", handle_install_packages)
    executor.register("install-binary", handle_install_binary)
    executor.register("generate-ssh-key", handle_generate_ssh_key)
    executor.register("link-config", handle_link_config)
    executor.register("run-command", handle_run_command)
|
||||||
|
|
||||||
|
|
||||||
|
def run_bootstrap(ctx: FlowContext, args):
    """Handler for `flow bootstrap run`: resolve the profile, plan its
    actions, and execute them (or print the plan with --dry-run).

    Exits 1 on: no profiles, ambiguous/unknown profile, OS mismatch, or
    malformed --var arguments.
    """
    # Check if flow package exists in dotfiles and link it first, so the
    # self-hosted config/manifest cascade takes effect for this run.
    flow_pkg = DOTFILES_DIR / "common" / "flow"
    if flow_pkg.exists() and (flow_pkg / ".config" / "flow").exists():
        ctx.console.info("Found flow config in dotfiles, linking...")
        # Link flow package first via a child `flow dotfiles link` process;
        # output is captured so a failure is reported as one warning line.
        result = subprocess.run(
            [sys.executable, "-m", "flow", "dotfiles", "link", "flow"],
            capture_output=True, text=True,
        )
        if result.returncode == 0:
            ctx.console.success("Flow config linked from dotfiles")
            # Reload manifest from newly linked location
            ctx.manifest = load_manifest()
        else:
            detail = (result.stderr or "").strip() or (result.stdout or "").strip() or "unknown error"
            ctx.console.warn(f"Failed to link flow config: {detail}")

    profiles = _get_profiles(ctx)
    if not profiles:
        ctx.console.error("No profiles found in manifest.")
        sys.exit(1)

    # --profile may be omitted only when exactly one profile exists.
    profile_name = args.profile
    if not profile_name:
        if len(profiles) == 1:
            profile_name = next(iter(profiles))
        else:
            ctx.console.error(f"Multiple profiles available. Specify with --profile: {', '.join(profiles.keys())}")
            sys.exit(1)

    if profile_name not in profiles:
        ctx.console.error(f"Profile not found: {profile_name}. Available: {', '.join(profiles.keys())}")
        sys.exit(1)

    env_config = profiles[profile_name]

    # Refuse to run a profile targeting a different OS than detected.
    profile_os = env_config.get("os")
    if profile_os and profile_os != ctx.platform.os:
        ctx.console.error(
            f"Profile '{profile_name}' targets '{profile_os}', current OS is '{ctx.platform.os}'"
        )
        sys.exit(1)

    try:
        variables = _parse_variables(args.var)
    except ValueError as e:
        ctx.console.error(str(e))
        sys.exit(1)

    # Plan-then-execute: build the full action list, wire up handlers,
    # then run (handlers are skipped entirely under --dry-run).
    actions = _plan_actions(ctx, profile_name, env_config, variables)
    executor = ActionExecutor(ctx.console)
    _register_handlers(executor, ctx, variables)
    executor.execute(actions, dry_run=args.dry_run, current_os=ctx.platform.os)
|
||||||
|
|
||||||
|
|
||||||
|
def run_list(ctx: FlowContext, args):
    """Handler for `flow bootstrap list`: print a summary table of all
    profiles (OS target, package count, non-package action count)."""
    profiles = _get_profiles(ctx)
    if not profiles:
        ctx.console.info("No profiles defined in manifest.")
        return

    rows = []
    for profile, spec in sorted(profiles.items()):
        # Count every package across all list-valued sections of `packages`.
        package_total = sum(
            len(group)
            for group in spec.get("packages", {}).values()
            if isinstance(group, list)
        )
        # Count the remaining planned steps: one each for hostname/locale/
        # shell when present, plus ssh keys, config links, and runcmds.
        step_total = (
            sum(1 for key in ("hostname", "locale", "shell") if key in spec)
            + len(spec.get("ssh_keygen", []))
            + len(spec.get("configs", []))
            + len(spec.get("runcmd", []))
        )
        rows.append([profile, spec.get("os", "any"), str(package_total), str(step_total)])

    ctx.console.table(["PROFILE", "OS", "PACKAGES", "ACTIONS"], rows)
|
||||||
|
|
||||||
|
|
||||||
|
def run_show(ctx: FlowContext, args):
    """Handler for `flow bootstrap show`: display a profile's planned
    actions as a dry run (nothing is executed)."""
    profiles = _get_profiles(ctx)
    name = args.profile

    if name not in profiles:
        ctx.console.error(f"Profile not found: {name}. Available: {', '.join(profiles.keys())}")
        sys.exit(1)

    # Plan with no variables supplied; dry_run=True prints the plan only.
    planned = _plan_actions(ctx, name, profiles[name], {})
    ActionExecutor(ctx.console).execute(planned, dry_run=True)
|
||||||
525
commands/completion.py
Normal file
525
commands/completion.py
Normal file
@@ -0,0 +1,525 @@
|
|||||||
|
"""flow completion — shell completion support (dynamic zsh)."""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Optional, Sequence, Set
|
||||||
|
|
||||||
|
from flow.commands.enter import HOST_TEMPLATES
|
||||||
|
from flow.core.config import load_config, load_manifest
|
||||||
|
from flow.core.paths import DOTFILES_DIR, INSTALLED_STATE
|
||||||
|
|
||||||
|
ZSH_RC_START = "# >>> flow completion >>>"
|
||||||
|
ZSH_RC_END = "# <<< flow completion <<<"
|
||||||
|
|
||||||
|
TOP_LEVEL_COMMANDS = [
|
||||||
|
"enter",
|
||||||
|
"dev",
|
||||||
|
"dotfiles",
|
||||||
|
"dot",
|
||||||
|
"bootstrap",
|
||||||
|
"setup",
|
||||||
|
"provision",
|
||||||
|
"package",
|
||||||
|
"pkg",
|
||||||
|
"sync",
|
||||||
|
"completion",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Register the `completion` command and its subcommands on `subparsers`."""
    p = subparsers.add_parser("completion", help="Shell completion helpers")
    sub = p.add_subparsers(dest="completion_command")

    # `flow completion zsh` — print the completion script to stdout.
    zsh = sub.add_parser("zsh", help="Print zsh completion script")
    zsh.set_defaults(handler=run_zsh_script)

    # `flow completion install-zsh` — write the _flow file and wire up the rc.
    install = sub.add_parser("install-zsh", help="Install zsh completion script")
    install.add_argument(
        "--dir",
        default="~/.zsh/completions",
        help="Directory where _flow completion file is written",
    )
    install.add_argument(
        "--rc",
        default="~/.zshrc",
        help="Shell rc file to update with fpath/compinit snippet",
    )
    install.add_argument(
        "--no-rc",
        action="store_true",
        help="Do not modify rc file; only write completion script",
    )
    install.set_defaults(handler=run_install_zsh)

    # Internal endpoint invoked by the zsh widget; hidden from --help output
    # via argparse.SUPPRESS.
    hidden = sub.add_parser("_zsh_complete", help=argparse.SUPPRESS)
    hidden.add_argument("--cword", type=int, required=True, help=argparse.SUPPRESS)
    hidden.add_argument("words", nargs="*", help=argparse.SUPPRESS)
    hidden.set_defaults(handler=run_zsh_complete)

    # Bare `flow completion` just prints the help text.
    p.set_defaults(handler=lambda _ctx, args: p.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _canonical_command(command: str) -> str:
|
||||||
|
alias_map = {
|
||||||
|
"dot": "dotfiles",
|
||||||
|
"setup": "bootstrap",
|
||||||
|
"provision": "bootstrap",
|
||||||
|
"pkg": "package",
|
||||||
|
}
|
||||||
|
return alias_map.get(command, command)
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_config():
    """Load the user config, returning None instead of raising on any failure."""
    try:
        cfg = load_config()
    except Exception:
        return None
    return cfg
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_manifest():
    """Load the manifest, returning an empty dict instead of raising."""
    try:
        manifest = load_manifest()
    except Exception:
        return {}
    return manifest
|
||||||
|
|
||||||
|
|
||||||
|
def _list_targets() -> List[str]:
    """Sorted unique `namespace@platform` strings from the config ([] on failure)."""
    cfg = _safe_config()
    if cfg is None:
        return []
    combos = {f"{t.namespace}@{t.platform}" for t in cfg.targets}
    return sorted(combos)
|
||||||
|
|
||||||
|
|
||||||
|
def _list_namespaces() -> List[str]:
    """Sorted unique target namespaces from the config ([] on failure)."""
    cfg = _safe_config()
    return [] if cfg is None else sorted({t.namespace for t in cfg.targets})
|
||||||
|
|
||||||
|
|
||||||
|
def _list_platforms() -> List[str]:
    """Union of built-in host-template platforms and configured target platforms."""
    platforms: Set[str] = set(HOST_TEMPLATES.keys())
    cfg = _safe_config()
    if cfg is not None:
        platforms |= {t.platform for t in cfg.targets}
    return sorted(platforms)
|
||||||
|
|
||||||
|
|
||||||
|
def _list_bootstrap_profiles() -> List[str]:
    """Sorted profile names defined in the manifest ([] when unavailable)."""
    return sorted(_safe_manifest().get("profiles", {}).keys())
|
||||||
|
|
||||||
|
|
||||||
|
def _list_manifest_packages() -> List[str]:
    """Sorted binary package names defined in the manifest."""
    return sorted(_safe_manifest().get("binaries", {}).keys())
|
||||||
|
|
||||||
|
|
||||||
|
def _list_installed_packages() -> List[str]:
    """Package names recorded in the installed-state file.

    Returns an empty list when the file is missing, unreadable, is not valid
    JSON, or does not contain a JSON object.
    """
    if not INSTALLED_STATE.exists():
        return []
    try:
        state = json.loads(INSTALLED_STATE.read_text())
    except Exception:
        return []
    return sorted(state.keys()) if isinstance(state, dict) else []
|
||||||
|
|
||||||
|
|
||||||
|
def _list_dotfiles_profiles() -> List[str]:
    """Names of non-hidden profile directories under <dotfiles>/profiles."""
    root = DOTFILES_DIR / "profiles"
    if not root.is_dir():
        return []
    names = [
        entry.name
        for entry in root.iterdir()
        if entry.is_dir() and not entry.name.startswith(".")
    ]
    return sorted(names)
|
||||||
|
|
||||||
|
|
||||||
|
def _list_dotfiles_packages(profile: Optional[str] = None) -> List[str]:
    """Sorted package directory names from common/ plus profile directories.

    With `profile` given, only that profile's packages are merged with the
    common ones; otherwise packages from every profile are included.
    """

    def visible_dirs(root: Path):
        # Yield non-hidden child directory names; nothing if root is absent.
        if not root.is_dir():
            return
        for entry in root.iterdir():
            if entry.is_dir() and not entry.name.startswith("."):
                yield entry.name

    found: Set[str] = set(visible_dirs(DOTFILES_DIR / "common"))

    if profile:
        found.update(visible_dirs(DOTFILES_DIR / "profiles" / profile))
    else:
        profiles_root = DOTFILES_DIR / "profiles"
        if profiles_root.is_dir():
            for profile_dir in profiles_root.iterdir():
                if profile_dir.is_dir():
                    found.update(visible_dirs(profile_dir))

    return sorted(found)
|
||||||
|
|
||||||
|
|
||||||
|
def _list_container_names() -> List[str]:
    """Best-effort listing of dev container names via docker or podman.

    Returns [] when no runtime is on PATH, the query fails, or it exceeds
    the 1-second timeout (completion must stay fast).
    """
    runtime = next((rt for rt in ("docker", "podman") if shutil.which(rt)), None)
    if runtime is None:
        return []

    cmd = [
        runtime,
        "ps",
        "-a",
        "--filter",
        "label=dev=true",
        "--format",
        '{{.Label "dev.name"}}',
    ]
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=1)
    except Exception:
        return []
    if result.returncode != 0:
        return []

    names = {line.strip() for line in result.stdout.splitlines() if line.strip()}
    return sorted(names)
|
||||||
|
|
||||||
|
|
||||||
|
def _split_words(words: Sequence[str], cword: int):
|
||||||
|
tokens = list(words)
|
||||||
|
index = max(0, cword - 1)
|
||||||
|
|
||||||
|
if tokens:
|
||||||
|
tokens = tokens[1:]
|
||||||
|
index = max(0, cword - 2)
|
||||||
|
|
||||||
|
if index > len(tokens):
|
||||||
|
index = len(tokens)
|
||||||
|
|
||||||
|
current = tokens[index] if index < len(tokens) else ""
|
||||||
|
before = tokens[:index]
|
||||||
|
return before, current
|
||||||
|
|
||||||
|
|
||||||
|
def _filter(candidates: Sequence[str], prefix: str) -> List[str]:
|
||||||
|
unique = sorted(set(candidates))
|
||||||
|
if not prefix:
|
||||||
|
return unique
|
||||||
|
return [c for c in unique if c.startswith(prefix)]
|
||||||
|
|
||||||
|
|
||||||
|
def _profile_from_before(before: Sequence[str]) -> Optional[str]:
|
||||||
|
for i, token in enumerate(before):
|
||||||
|
if token == "--profile" and i + 1 < len(before):
|
||||||
|
return before[i + 1]
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _complete_dev(before: Sequence[str], current: str) -> List[str]:
    """Complete arguments for `flow dev`.

    `before` holds the tokens already typed after the top-level command;
    `current` is the word under the cursor.
    """
    # Only `dev` typed so far -> offer its subcommands (including the rm alias).
    if len(before) <= 1:
        return _filter(["create", "exec", "connect", "list", "stop", "remove", "rm", "respawn"], current)

    # Treat the `rm` alias as `remove` for the remaining logic.
    sub = "remove" if before[1] == "rm" else before[1]

    if sub in {"remove", "stop", "connect", "exec", "respawn"}:
        options = {
            "remove": ["-f", "--force", "-h", "--help"],
            "stop": ["--kill", "-h", "--help"],
            "exec": ["-h", "--help"],
            "connect": ["-h", "--help"],
            "respawn": ["-h", "--help"],
        }[sub]

        # A dash prefix means the user is completing an option flag.
        if current.startswith("-"):
            return _filter(options, current)

        # Only the first positional argument is a container name.
        non_opt = [t for t in before[2:] if not t.startswith("-")]
        if len(non_opt) == 0:
            return _filter(_list_container_names(), current)
        return []

    if sub == "create":
        options = ["-i", "--image", "-p", "--project", "-h", "--help"]
        # Value position after -i/--image: suggest known image shorthands.
        if before and before[-1] in ("-i", "--image"):
            return _filter(["tm0/node", "docker/python", "docker/alpine"], current)

        if current.startswith("-"):
            return _filter(options, current)

        return []

    # `list` takes no arguments.
    if sub == "list":
        return []

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def _complete_dotfiles(before: Sequence[str], current: str) -> List[str]:
    """Complete arguments for `flow dotfiles` (and its `dot` alias)."""
    # Only the command typed so far -> offer subcommands.
    if len(before) <= 1:
        return _filter(
            ["init", "link", "unlink", "status", "sync", "relink", "clean", "edit"],
            current,
        )

    sub = before[1]

    if sub == "init":
        # `init` only takes option flags.
        return _filter(["--repo", "-h", "--help"], current) if current.startswith("-") else []

    if sub in {"link", "relink"}:
        # Value position after --profile: complete profile names.
        if before and before[-1] == "--profile":
            return _filter(_list_dotfiles_profiles(), current)

        if current.startswith("-"):
            return _filter(["--profile", "--copy", "--force", "--dry-run", "-h", "--help"], current)

        # Positional packages, narrowed by any --profile already typed.
        profile = _profile_from_before(before)
        return _filter(_list_dotfiles_packages(profile), current)

    if sub == "unlink":
        if current.startswith("-"):
            return _filter(["-h", "--help"], current)
        return _filter(_list_dotfiles_packages(), current)

    if sub == "edit":
        if current.startswith("-"):
            return _filter(["--no-commit", "-h", "--help"], current)
        # Only the first positional argument is a package name.
        non_opt = [t for t in before[2:] if not t.startswith("-")]
        if len(non_opt) == 0:
            return _filter(_list_dotfiles_packages(), current)
        return []

    if sub == "clean":
        return _filter(["--dry-run", "-h", "--help"], current) if current.startswith("-") else []

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def _complete_bootstrap(before: Sequence[str], current: str) -> List[str]:
    """Complete arguments for `flow bootstrap` (and `setup`/`provision` aliases)."""
    if len(before) <= 1:
        return _filter(["run", "list", "show"], current)

    sub = before[1]

    if sub == "run":
        # Value position after --profile: complete profile names from the manifest.
        if before and before[-1] == "--profile":
            return _filter(_list_bootstrap_profiles(), current)
        if current.startswith("-"):
            return _filter(["--profile", "--dry-run", "--var", "-h", "--help"], current)
        return []

    if sub == "show":
        if current.startswith("-"):
            return _filter(["-h", "--help"], current)
        # Only the first positional argument is a profile name.
        non_opt = [t for t in before[2:] if not t.startswith("-")]
        if len(non_opt) == 0:
            return _filter(_list_bootstrap_profiles(), current)
        return []

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def _complete_package(before: Sequence[str], current: str) -> List[str]:
    """Complete arguments for `flow package` (and its `pkg` alias)."""
    if len(before) <= 1:
        return _filter(["install", "list", "remove"], current)

    sub = before[1]
    wants_option = current.startswith("-")

    if sub == "install":
        # Positional args are manifest package names; flags when a dash is typed.
        if wants_option:
            return _filter(["--dry-run", "-h", "--help"], current)
        return _filter(_list_manifest_packages(), current)

    if sub == "remove":
        # Only already-installed packages make sense here.
        if wants_option:
            return _filter(["-h", "--help"], current)
        return _filter(_list_installed_packages(), current)

    if sub == "list" and wants_option:
        return _filter(["--all", "-h", "--help"], current)

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def _complete_sync(before: Sequence[str], current: str) -> List[str]:
    """Complete arguments for `flow sync`."""
    if len(before) <= 1:
        return _filter(["check", "fetch", "summary"], current)

    if current.startswith("-"):
        # `check` has its own fetch toggles; everything else only gets help flags.
        if before[1] == "check":
            return _filter(["--fetch", "--no-fetch", "-h", "--help"], current)
        return _filter(["-h", "--help"], current)

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def complete(words: Sequence[str], cword: int) -> List[str]:
    """Return completion candidates for the raw zsh word list.

    `words` is the full command line (including the program name) and
    `cword` is the 1-based index of the word being completed.
    """
    before, current = _split_words(words, cword)

    # Nothing typed after `flow` yet: offer top-level commands and global flags.
    if not before:
        return _filter(TOP_LEVEL_COMMANDS + ["-h", "--help", "-v", "--version"], current)

    # Resolve aliases (dot, setup, provision, pkg) to their canonical command.
    command = _canonical_command(before[0])

    if command == "enter":
        # Value positions for -p/--platform and -n/--namespace.
        if before and before[-1] in ("-p", "--platform"):
            return _filter(_list_platforms(), current)
        if before and before[-1] in ("-n", "--namespace"):
            return _filter(_list_namespaces(), current)
        if current.startswith("-"):
            return _filter(
                ["-u", "--user", "-n", "--namespace", "-p", "--platform", "-s", "--session", "--no-tmux", "-d", "--dry-run", "-h", "--help"],
                current,
            )
        # Positional argument: namespace@platform targets from the config.
        return _filter(_list_targets(), current)

    # Subcommand-specific completion is delegated per command.
    if command == "dev":
        return _complete_dev(before, current)

    if command == "dotfiles":
        return _complete_dotfiles(before, current)

    if command == "bootstrap":
        return _complete_bootstrap(before, current)

    if command == "package":
        return _complete_package(before, current)

    if command == "sync":
        return _complete_sync(before, current)

    if command == "completion":
        # Only the public subcommands are offered; _zsh_complete stays hidden.
        if len(before) <= 1:
            return _filter(["zsh", "install-zsh"], current)

        sub = before[1]
        if sub == "install-zsh" and current.startswith("-"):
            return _filter(["--dir", "--rc", "--no-rc", "-h", "--help"], current)

        return []

    return []
|
||||||
|
|
||||||
|
|
||||||
|
def run_zsh_complete(_ctx, args):
    """Print one completion candidate per line for the zsh helper widget."""
    for candidate in complete(args.words, args.cword):
        print(candidate)
|
||||||
|
|
||||||
|
|
||||||
|
def _zsh_script_text() -> str:
    """Return the zsh completion widget source.

    The widget shells out to `flow completion _zsh_complete` for dynamic
    candidates and falls back to file completion for path-like words.
    """
    return r'''#compdef flow

_flow() {
  local -a suggestions
  suggestions=("${(@f)$(flow completion _zsh_complete --cword "$CURRENT" -- "${words[@]}" 2>/dev/null)}")

  if (( ${#suggestions[@]} > 0 )); then
    compadd -Q -- "${suggestions[@]}"
    return 0
  fi

  if [[ "$words[CURRENT]" == */* || "$words[CURRENT]" == ./* || "$words[CURRENT]" == ~* ]]; then
    _files
    return 0
  fi

  return 1
}

compdef _flow flow
'''
|
||||||
|
|
||||||
|
|
||||||
|
def _zsh_dir_for_rc(path: Path) -> str:
|
||||||
|
home = Path.home().resolve()
|
||||||
|
resolved = path.expanduser().resolve()
|
||||||
|
try:
|
||||||
|
rel = resolved.relative_to(home)
|
||||||
|
return f"~/{rel}" if str(rel) != "." else "~"
|
||||||
|
except ValueError:
|
||||||
|
return str(resolved)
|
||||||
|
|
||||||
|
|
||||||
|
def _zsh_rc_snippet(completions_dir: Path) -> str:
    """Build the marker-delimited block inserted into the user's zshrc."""
    lines = [
        ZSH_RC_START,
        f"fpath=({_zsh_dir_for_rc(completions_dir)} $fpath)",
        "autoload -Uz compinit && compinit",
        ZSH_RC_END,
    ]
    return "\n".join(lines) + "\n"
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_rc_snippet(rc_path: Path, completions_dir: Path) -> bool:
    """Insert or refresh the flow completion snippet in `rc_path`.

    Returns True when the rc file was written, False when it already
    contained an identical, up-to-date snippet.
    """
    snippet = _zsh_rc_snippet(completions_dir)
    if rc_path.exists():
        content = rc_path.read_text()
    else:
        content = ""

    # Existing marker block: replace it in place, preserving surrounding text.
    if ZSH_RC_START in content and ZSH_RC_END in content:
        start = content.find(ZSH_RC_START)
        end = content.find(ZSH_RC_END, start)
        if end >= 0:
            end += len(ZSH_RC_END)
            # Strip the snippet's trailing newline so text after the old block
            # keeps its original spacing.
            updated = content[:start] + snippet.rstrip("\n") + content[end:]
            if updated == content:
                return False
            rc_path.parent.mkdir(parents=True, exist_ok=True)
            rc_path.write_text(updated)
            return True

    # No complete marker block yet: append, inserting a newline separator
    # only if the file doesn't already end with one.
    sep = "" if content.endswith("\n") or content == "" else "\n"
    rc_path.parent.mkdir(parents=True, exist_ok=True)
    rc_path.write_text(content + sep + snippet)
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def run_install_zsh(_ctx, args):
    """Write the _flow completion file and (optionally) wire it into the rc file."""
    target_dir = Path(args.dir).expanduser()
    target_dir.mkdir(parents=True, exist_ok=True)

    script_path = target_dir / "_flow"
    script_path.write_text(_zsh_script_text())
    print(f"Installed completion script: {script_path}")

    if args.no_rc:
        print("Skipped rc file update (--no-rc)")
        return

    rc_file = Path(args.rc).expanduser()
    if _ensure_rc_snippet(rc_file, target_dir):
        print(f"Updated shell rc: {rc_file}")
    else:
        print(f"Shell rc already configured: {rc_file}")

    print("Restart shell or run: autoload -Uz compinit && compinit")
|
||||||
|
|
||||||
|
|
||||||
|
def run_zsh_script(_ctx, _args):
    """Print the zsh completion script (for `flow completion zsh`)."""
    script = _zsh_script_text()
    print(script)
|
||||||
349
commands/container.py
Normal file
349
commands/container.py
Normal file
@@ -0,0 +1,349 @@
|
|||||||
|
"""flow dev <subcommand> — container management."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from flow.core.config import FlowContext
|
||||||
|
|
||||||
|
DEFAULT_REGISTRY = "registry.tomastm.com"
|
||||||
|
DEFAULT_TAG = "latest"
|
||||||
|
CONTAINER_HOME = "/home/dev"
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Register the `dev` command and its container-lifecycle subcommands."""
    p = subparsers.add_parser("dev", help="Manage development containers")
    sub = p.add_subparsers(dest="dev_command")

    # create
    create = sub.add_parser("create", help="Create and start a development container")
    create.add_argument("name", help="Container name")
    create.add_argument("-i", "--image", required=True, help="Container image")
    create.add_argument("-p", "--project", help="Path to project directory")
    create.set_defaults(handler=run_create)

    # exec
    exec_cmd = sub.add_parser("exec", help="Execute command in a container")
    exec_cmd.add_argument("name", help="Container name")
    exec_cmd.add_argument("cmd", nargs="*", help="Command to run (default: interactive shell)")
    exec_cmd.set_defaults(handler=run_exec)

    # connect
    connect = sub.add_parser("connect", help="Attach to container tmux session")
    connect.add_argument("name", help="Container name")
    connect.set_defaults(handler=run_connect)

    # list
    ls = sub.add_parser("list", help="List development containers")
    ls.set_defaults(handler=run_list)

    # stop
    stop = sub.add_parser("stop", help="Stop a development container")
    stop.add_argument("name", help="Container name")
    stop.add_argument("--kill", action="store_true", help="Kill instead of graceful stop")
    stop.set_defaults(handler=run_stop)

    # remove (with `rm` alias, mirrored by the shell completion)
    remove = sub.add_parser("remove", aliases=["rm"], help="Remove a development container")
    remove.add_argument("name", help="Container name")
    remove.add_argument("-f", "--force", action="store_true", help="Force removal")
    remove.set_defaults(handler=run_remove)

    # respawn
    respawn = sub.add_parser("respawn", help="Respawn all tmux panes for a session")
    respawn.add_argument("name", help="Session/container name")
    respawn.set_defaults(handler=run_respawn)

    # Bare `flow dev` prints the help text.
    p.set_defaults(handler=lambda ctx, args: p.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _runtime():
    """Return the first available container runtime binary name.

    Prefers docker over podman; raises RuntimeError when neither is on PATH.
    """
    for candidate in ("docker", "podman"):
        if shutil.which(candidate):
            return candidate
    raise RuntimeError("No container runtime found (docker or podman)")
|
||||||
|
|
||||||
|
|
||||||
|
def _cname(name: str) -> str:
|
||||||
|
"""Normalize to dev- prefix."""
|
||||||
|
return name if name.startswith("dev-") else f"dev-{name}"
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_image_ref(
    image: str,
    *,
    default_registry: str = DEFAULT_REGISTRY,
    default_tag: str = DEFAULT_TAG,
):
    """Parse image shorthand into (full_ref, repo, tag, label).

    Shorthands: `docker/<name>` maps to docker.io/library/<name>,
    `tm0/<name>` maps to the default registry, and an explicit registry
    prefix (dotted host, host:port, or `localhost`) is honored as-is.
    """
    registry, tag = default_registry, default_tag

    if image.startswith("docker/"):
        # Docker Hub official images live under the implicit `library/` namespace.
        registry = "docker.io"
        image = f"library/{image.split('/', 1)[1]}"
    elif image.startswith("tm0/"):
        registry = default_registry
        image = image.split("/", 1)[1]
    elif "/" in image:
        prefix, remainder = image.split("/", 1)
        # Only treat the prefix as a registry when it looks like a host.
        looks_like_host = "." in prefix or ":" in prefix or prefix == "localhost"
        if looks_like_host:
            registry, image = prefix, remainder

    # A colon in the final path component is a tag (not a registry port).
    last_component = image.split("/")[-1]
    if ":" in last_component:
        image, _, tag = image.rpartition(":")

    repo = image
    full_ref = f"{registry}/{repo}:{tag}"
    # Label prefix: second-level domain for dotted registries, else the registry.
    if "." in registry:
        label_prefix = registry.rsplit(".", 1)[0].rsplit(".", 1)[-1]
    else:
        label_prefix = registry
    label = f"{label_prefix}/{repo.split('/')[-1]}"

    return full_ref, repo, tag, label
|
||||||
|
|
||||||
|
|
||||||
|
def _container_exists(rt: str, cname: str) -> bool:
    """True if a container named `cname` exists (running or stopped)."""
    listing = subprocess.run(
        [rt, "container", "ls", "-a", "--format", "{{.Names}}"],
        capture_output=True, text=True,
    )
    return cname in listing.stdout.strip().split("\n")
|
||||||
|
|
||||||
|
|
||||||
|
def _container_running(rt: str, cname: str) -> bool:
    """True if a container named `cname` is currently running."""
    listing = subprocess.run(
        [rt, "container", "ls", "--format", "{{.Names}}"],
        capture_output=True, text=True,
    )
    return cname in listing.stdout.strip().split("\n")
|
||||||
|
|
||||||
|
|
||||||
|
def run_create(ctx: FlowContext, args):
    """Create and start a detached dev container running `sleep infinity`.

    Mounts the project directory (if given), the host docker socket, and
    selected files from $HOME; labels the container so other `flow dev`
    subcommands can discover it.
    """
    rt = _runtime()
    cname = _cname(args.name)

    if _container_exists(rt, cname):
        ctx.console.error(f"Container already exists: {cname}")
        sys.exit(1)

    # Resolve and validate the optional project directory up front.
    project_path = os.path.realpath(args.project) if args.project else None
    if project_path and not os.path.isdir(project_path):
        ctx.console.error(f"Invalid project path: {project_path}")
        sys.exit(1)

    full_ref, _, _, _ = _parse_image_ref(
        args.image,
        default_registry=ctx.config.container_registry,
        default_tag=ctx.config.container_tag,
    )

    cmd = [
        rt, "run", "-d",
        "--name", cname,
        "--label", "dev=true",
        "--label", f"dev.name={args.name}",
        "--label", f"dev.image_ref={full_ref}",
        "--network", "host",
        "--init",
    ]

    if project_path:
        cmd.extend(["-v", f"{project_path}:/workspace"])
        cmd.extend(["--label", f"dev.project_path={project_path}"])

    # Share the host docker socket so tools inside can drive the host daemon.
    docker_sock = "/var/run/docker.sock"
    if os.path.exists(docker_sock):
        cmd.extend(["-v", f"{docker_sock}:{docker_sock}"])

    # Mount credentials/caches from the host home (read-only where sensible).
    home = os.path.expanduser("~")
    if os.path.isdir(f"{home}/.ssh"):
        cmd.extend(["-v", f"{home}/.ssh:{CONTAINER_HOME}/.ssh:ro"])
    if os.path.isfile(f"{home}/.npmrc"):
        cmd.extend(["-v", f"{home}/.npmrc:{CONTAINER_HOME}/.npmrc:ro"])
    if os.path.isdir(f"{home}/.npm"):
        cmd.extend(["-v", f"{home}/.npm:{CONTAINER_HOME}/.npm"])

    # Add docker group if available so the mounted socket is usable without root.
    try:
        import grp
        docker_gid = str(grp.getgrnam("docker").gr_gid)
        cmd.extend(["--group-add", docker_gid])
    except (KeyError, ImportError):
        pass

    # Keep the container alive; work happens via `flow dev exec/connect`.
    cmd.extend([full_ref, "sleep", "infinity"])
    subprocess.run(cmd, check=True)
    ctx.console.success(f"Created and started container: {cname}")
|
||||||
|
|
||||||
|
|
||||||
|
def run_exec(ctx: FlowContext, args):
    """Run a command in a container, or drop into the best available shell.

    With an explicit command, its exit code is propagated via sys.exit.
    Without one, zsh, bash, then sh are tried in order as login shells.
    """
    rt = _runtime()
    cname = _cname(args.name)

    if not _container_running(rt, cname):
        ctx.console.error(f"Container {cname} not running")
        sys.exit(1)

    if args.cmd:
        exec_cmd = [rt, "exec"]
        # Only allocate a TTY when stdin is interactive (keeps piping usable).
        if sys.stdin.isatty():
            exec_cmd.extend(["-it"])
        exec_cmd.append(cname)
        exec_cmd.extend(args.cmd)
        result = subprocess.run(exec_cmd)
        sys.exit(result.returncode)

    # No command — try shells in order
    last_code = 0
    for shell in ("zsh -l", "bash -l", "sh"):
        parts = shell.split()
        # Custom detach keys avoid colliding with common shell keybindings.
        exec_cmd = [rt, "exec", "--detach-keys", "ctrl-q,ctrl-p", "-it", cname] + parts
        result = subprocess.run(exec_cmd)
        if result.returncode == 0:
            return
        last_code = result.returncode

    ctx.console.error(f"Unable to start an interactive shell in {cname}")
    sys.exit(last_code or 1)
|
||||||
|
|
||||||
|
|
||||||
|
def run_connect(ctx: FlowContext, args):
    """Attach to a tmux session wrapping the container, creating it if needed.

    Starts the container when stopped; without tmux installed this degrades
    to a plain `flow dev exec`. Replaces the current process via execvp on
    the final attach/switch.
    """
    rt = _runtime()
    cname = _cname(args.name)

    if not _container_exists(rt, cname):
        ctx.console.error(f"Container does not exist: {cname}")
        sys.exit(1)

    if not _container_running(rt, cname):
        subprocess.run([rt, "start", cname], capture_output=True)

    if not shutil.which("tmux"):
        ctx.console.warn("tmux not found; falling back to direct exec")
        args.cmd = []
        run_exec(ctx, args)
        return

    # Get image label for env
    result = subprocess.run(
        [rt, "container", "inspect", cname, "--format", "{{ .Config.Image }}"],
        capture_output=True, text=True,
    )
    image_ref = result.stdout.strip()
    _, _, _, image_label = _parse_image_ref(image_ref)

    # Create tmux session if needed
    check = subprocess.run(["tmux", "has-session", "-t", cname], capture_output=True)
    if check.returncode != 0:
        # Propagate identity env vars so prompts inside the session can show them.
        ns = os.environ.get("DF_NAMESPACE", "")
        plat = os.environ.get("DF_PLATFORM", "")
        subprocess.run([
            "tmux", "new-session", "-ds", cname,
            "-e", f"DF_IMAGE={image_label}",
            "-e", f"DF_NAMESPACE={ns}",
            "-e", f"DF_PLATFORM={plat}",
            f"flow dev exec {args.name}",
        ])
        # New windows/panes in this session should also exec into the container.
        subprocess.run([
            "tmux", "set-option", "-t", cname,
            "default-command", f"flow dev exec {args.name}",
        ])

    # Inside tmux: switch the current client; outside: attach normally.
    if os.environ.get("TMUX"):
        os.execvp("tmux", ["tmux", "switch-client", "-t", cname])
    else:
        os.execvp("tmux", ["tmux", "attach", "-t", cname])
|
||||||
|
|
||||||
|
|
||||||
|
def run_list(ctx: FlowContext, args):
    """Print a table of all dev-labelled containers for the active runtime."""
    rt = _runtime()
    fmt = '{{.Label "dev.name"}}|{{.Image}}|{{.Label "dev.project_path"}}|{{.Status}}'
    result = subprocess.run(
        [rt, "ps", "-a", "--filter", "label=dev=true", "--format", fmt],
        capture_output=True, text=True,
    )

    home = os.path.expanduser("~")
    rows = []
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue
        fields = line.split("|")
        if len(fields) < 4:
            continue
        name, image, project, status = fields[0], fields[1], fields[2], fields[3]
        # Render project paths under $HOME with a ~ shorthand.
        if project.startswith(home):
            project = "~" + project[len(home):]
        rows.append([name, image, project, status])

    if not rows:
        ctx.console.info("No development containers found.")
        return

    ctx.console.table(["NAME", "IMAGE", "PROJECT", "STATUS"], rows)
|
||||||
|
|
||||||
|
|
||||||
|
def run_stop(ctx: FlowContext, args):
    """Stop (or kill) a dev container, then leave its tmux session if attached."""
    rt = _runtime()
    cname = _cname(args.name)

    if not _container_exists(rt, cname):
        ctx.console.error(f"Container {cname} does not exist")
        sys.exit(1)

    # --kill sends an immediate SIGKILL; the default is a graceful stop.
    verb, action = ("Killing", "kill") if args.kill else ("Stopping", "stop")
    ctx.console.info(f"{verb} container {cname}...")
    subprocess.run([rt, action, cname], check=True)

    _tmux_fallback(cname)
|
||||||
|
|
||||||
|
|
||||||
|
def run_remove(ctx: FlowContext, args):
    """Remove a dev container, then leave its tmux session if attached."""
    rt = _runtime()
    cname = _cname(args.name)

    if not _container_exists(rt, cname):
        ctx.console.error(f"Container {cname} does not exist")
        sys.exit(1)

    if args.force:
        ctx.console.info(f"Removing container {cname} (force)...")
        rm_cmd = [rt, "rm", "-f", cname]
    else:
        ctx.console.info(f"Removing container {cname}...")
        rm_cmd = [rt, "rm", cname]
    subprocess.run(rm_cmd, check=True)

    _tmux_fallback(cname)
|
||||||
|
|
||||||
|
|
||||||
|
def run_respawn(ctx: FlowContext, args):
    """Respawn every tmux pane in the session associated with a container."""
    cname = _cname(args.name)
    panes = subprocess.run(
        ["tmux", "list-panes", "-t", cname, "-s",
         "-F", "#{session_name}:#{window_index}.#{pane_index}"],
        capture_output=True, text=True,
    )
    for pane in panes.stdout.strip().split("\n"):
        if not pane:
            continue
        ctx.console.info(f"Respawning {pane}...")
        subprocess.run(["tmux", "respawn-pane", "-t", pane])
|
||||||
|
|
||||||
|
|
||||||
|
def _tmux_fallback(cname: str):
    """If the caller's tmux client is attached to `cname`, switch it to `default`."""
    if not os.environ.get("TMUX"):
        return
    query = subprocess.run(
        ["tmux", "display-message", "-p", "#S"],
        capture_output=True, text=True,
    )
    if query.stdout.strip() != cname:
        return
    # Ensure a fallback session exists before switching the client over.
    subprocess.run(["tmux", "new-session", "-ds", "default"], capture_output=True)
    subprocess.run(["tmux", "switch-client", "-t", "default"])
|
||||||
425
commands/dotfiles.py
Normal file
425
commands/dotfiles.py
Normal file
@@ -0,0 +1,425 @@
|
|||||||
|
"""flow dotfiles — dotfile management with GNU Stow-style symlinking."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import shlex
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from flow.core.config import FlowContext
|
||||||
|
from flow.core.paths import DOTFILES_DIR, LINKED_STATE
|
||||||
|
from flow.core.stow import LinkTree, TreeFolder
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Wire up the `dotfiles` command group and all of its subcommands."""
    parser = subparsers.add_parser("dotfiles", aliases=["dot"], help="Manage dotfiles")
    commands = parser.add_subparsers(dest="dotfiles_command")

    # init: clone the repository for the first time.
    init_cmd = commands.add_parser("init", help="Clone dotfiles repository")
    init_cmd.add_argument("--repo", help="Override repository URL")
    init_cmd.set_defaults(handler=run_init)

    # link: create symlinks (or copies) into $HOME.
    link_cmd = commands.add_parser("link", help="Create symlinks for dotfile packages")
    link_cmd.add_argument("packages", nargs="*", help="Specific packages to link (default: all)")
    link_cmd.add_argument("--profile", help="Profile to use for overrides")
    link_cmd.add_argument("--copy", action="store_true", help="Copy instead of symlink")
    link_cmd.add_argument("--force", action="store_true", help="Overwrite existing files")
    link_cmd.add_argument("--dry-run", action="store_true", help="Show what would be done")
    link_cmd.set_defaults(handler=run_link)

    # unlink: remove previously created symlinks.
    unlink_cmd = commands.add_parser("unlink", help="Remove dotfile symlinks")
    unlink_cmd.add_argument("packages", nargs="*", help="Specific packages to unlink (default: all)")
    unlink_cmd.set_defaults(handler=run_unlink)

    # status: report link health.
    status_cmd = commands.add_parser("status", help="Show dotfiles link status")
    status_cmd.set_defaults(handler=run_status)

    # sync: pull from the remote.
    sync_cmd = commands.add_parser("sync", help="Pull latest dotfiles from remote")
    sync_cmd.set_defaults(handler=run_sync)

    # relink: unlink + link in one step.
    relink_cmd = commands.add_parser("relink", help="Refresh symlinks after changes")
    relink_cmd.add_argument("packages", nargs="*", help="Specific packages to relink (default: all)")
    relink_cmd.add_argument("--profile", help="Profile to use for overrides")
    relink_cmd.set_defaults(handler=run_relink)

    # clean: drop broken symlinks.
    clean_cmd = commands.add_parser("clean", help="Remove broken symlinks")
    clean_cmd.add_argument("--dry-run", action="store_true", help="Show what would be done")
    clean_cmd.set_defaults(handler=run_clean)

    # edit: open a package in $EDITOR with an auto-commit workflow.
    edit_cmd = commands.add_parser("edit", help="Edit package config with auto-commit")
    edit_cmd.add_argument("package", help="Package name to edit")
    edit_cmd.add_argument("--no-commit", action="store_true", help="Skip auto-commit")
    edit_cmd.set_defaults(handler=run_edit)

    # Bare `flow dotfiles` just prints usage.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _load_state() -> dict:
    """Read the persisted link state; default to an empty links mapping."""
    if not LINKED_STATE.exists():
        return {"links": {}}
    with open(LINKED_STATE) as fh:
        return json.load(fh)
|
||||||
|
|
||||||
|
|
||||||
|
def _save_state(state: dict):
    """Persist link state as pretty-printed JSON, creating parent dirs."""
    LINKED_STATE.parent.mkdir(parents=True, exist_ok=True)
    with open(LINKED_STATE, "w") as fh:
        json.dump(state, fh, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
def _discover_packages(dotfiles_dir: Path, profile: Optional[str] = None) -> dict:
|
||||||
|
"""Discover packages from common/ and optionally profiles/<name>/.
|
||||||
|
|
||||||
|
Returns {package_name: source_dir} with profile dirs taking precedence.
|
||||||
|
"""
|
||||||
|
packages = {}
|
||||||
|
common = dotfiles_dir / "common"
|
||||||
|
if common.is_dir():
|
||||||
|
for pkg in sorted(common.iterdir()):
|
||||||
|
if pkg.is_dir() and not pkg.name.startswith("."):
|
||||||
|
packages[pkg.name] = pkg
|
||||||
|
|
||||||
|
if profile:
|
||||||
|
profile_dir = dotfiles_dir / "profiles" / profile
|
||||||
|
if profile_dir.is_dir():
|
||||||
|
for pkg in sorted(profile_dir.iterdir()):
|
||||||
|
if pkg.is_dir() and not pkg.name.startswith("."):
|
||||||
|
packages[pkg.name] = pkg # Override common
|
||||||
|
|
||||||
|
return packages
|
||||||
|
|
||||||
|
|
||||||
|
def _walk_package(source_dir: Path, home: Path):
|
||||||
|
"""Yield (source_file, target_file) pairs for a package directory.
|
||||||
|
|
||||||
|
Files in the package directory map relative to $HOME.
|
||||||
|
"""
|
||||||
|
for root, _dirs, files in os.walk(source_dir):
|
||||||
|
for fname in files:
|
||||||
|
src = Path(root) / fname
|
||||||
|
rel = src.relative_to(source_dir)
|
||||||
|
dst = home / rel
|
||||||
|
yield src, dst
|
||||||
|
|
||||||
|
|
||||||
|
def run_init(ctx: FlowContext, args):
    """Clone the configured dotfiles repository into DOTFILES_DIR."""
    repo_url = args.repo or ctx.config.dotfiles_url
    if not repo_url:
        ctx.console.error("No dotfiles repository URL. Set it in config or pass --repo.")
        sys.exit(1)

    # Never clobber an existing checkout.
    if DOTFILES_DIR.exists():
        ctx.console.warn(f"Dotfiles directory already exists: {DOTFILES_DIR}")
        return

    DOTFILES_DIR.parent.mkdir(parents=True, exist_ok=True)
    branch = ctx.config.dotfiles_branch
    ctx.console.info(f"Cloning {repo_url} (branch: {branch})...")
    subprocess.run(["git", "clone", "-b", branch, repo_url, str(DOTFILES_DIR)], check=True)
    ctx.console.success(f"Dotfiles cloned to {DOTFILES_DIR}")
|
||||||
|
|
||||||
|
|
||||||
|
def run_link(ctx: FlowContext, args):
    """Symlink (or, with --copy, copy) dotfile packages into $HOME.

    Honors --profile (profile packages override common ones), --force
    (overwrite conflicting targets) and --dry-run (plan only). Symlink
    mode plans all operations first, then detects conflicts, then
    executes; copy mode resolves each file immediately.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    home = Path.home()
    packages = _discover_packages(DOTFILES_DIR, args.profile)

    # Filter to requested packages
    if args.packages:
        packages = {k: v for k, v in packages.items() if k in args.packages}
        missing = set(args.packages) - set(packages.keys())
        if missing:
            ctx.console.warn(f"Packages not found: {', '.join(missing)}")
        if not packages:
            ctx.console.error("No valid packages selected")
            sys.exit(1)

    # Build current link tree from state
    state = _load_state()
    try:
        tree = LinkTree.from_state(state)
    except RuntimeError as e:
        # from_state signals an unusable on-disk state format this way.
        ctx.console.error(str(e))
        sys.exit(1)
    folder = TreeFolder(tree)

    all_operations = []   # planned symlink operations (symlink mode)
    copied_count = 0      # files actually copied (--copy mode)

    for pkg_name, source_dir in packages.items():
        ctx.console.info(f"[{pkg_name}]")
        for src, dst in _walk_package(source_dir, home):
            if args.copy:
                # --copy bypasses the link tree: conflicts are resolved
                # per file, right here.
                if dst.exists() or dst.is_symlink():
                    if not args.force:
                        ctx.console.warn(f"  Skipped (exists): {dst}")
                        continue
                    if dst.is_dir() and not dst.is_symlink():
                        # Refuse to replace a real directory with a file.
                        ctx.console.error(f"Cannot overwrite directory with --copy: {dst}")
                        continue
                    if not args.dry_run:
                        dst.unlink()

                if args.dry_run:
                    print(f"  COPY: {src} -> {dst}")
                else:
                    dst.parent.mkdir(parents=True, exist_ok=True)
                    shutil.copy2(src, dst)
                    print(f"  Copied: {src} -> {dst}")
                    copied_count += 1
                continue

            # Symlink mode: only collect planned operations for now.
            ops = folder.plan_link(src, dst, pkg_name)
            all_operations.extend(ops)

    if args.copy:
        if args.dry_run:
            return
        ctx.console.success(f"Copied {copied_count} item(s)")
        return

    # Conflict detection (two-phase: plan everything first, then check)
    conflicts = folder.detect_conflicts(all_operations)
    if conflicts and not args.force:
        for conflict in conflicts:
            ctx.console.error(conflict)
        ctx.console.error("\nUse --force to overwrite existing files")
        sys.exit(1)

    # Handle force mode: remove conflicting targets before linking
    if args.force and not args.dry_run:
        for op in all_operations:
            if op.type != "create_symlink":
                continue
            if not (op.target.exists() or op.target.is_symlink()):
                continue
            if op.target in tree.links:
                # Already managed by our link tree — not a conflict.
                continue
            if op.target.is_dir() and not op.target.is_symlink():
                ctx.console.error(f"Cannot overwrite directory with --force: {op.target}")
                sys.exit(1)
            op.target.unlink()

    # Execute operations
    if args.dry_run:
        ctx.console.info("\nPlanned operations:")
        for op in all_operations:
            print(str(op))
    else:
        folder.execute_operations(all_operations, dry_run=False)
        # Persist the updated link tree so unlink/status can find it later.
        state = folder.to_state()
        _save_state(state)
        ctx.console.success(f"Linked {len(all_operations)} item(s)")
|
||||||
|
|
||||||
|
|
||||||
|
def run_unlink(ctx: FlowContext, args):
    """Remove recorded symlinks for the given packages (default: all)."""
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles found.")
        return

    selected = args.packages if args.packages else list(links_by_package.keys())
    removed = 0

    for pkg_name in selected:
        links = links_by_package.get(pkg_name, {})
        if not links:
            continue

        ctx.console.info(f"[{pkg_name}]")
        for dst_str in list(links.keys()):
            dst = Path(dst_str)
            if dst.is_symlink():
                # Only ever delete actual symlinks.
                dst.unlink()
                print(f"  Removed: {dst}")
                removed += 1
            elif dst.exists():
                # A real file sits there now; leave it alone.
                ctx.console.warn(f"  Not a symlink, skipping: {dst}")
            else:
                print(f"  Already gone: {dst}")

        # Forget the package entry regardless of per-file outcomes.
        links_by_package.pop(pkg_name, None)

    _save_state(state)
    ctx.console.success(f"Removed {removed} symlink(s)")
|
||||||
|
|
||||||
|
|
||||||
|
def run_status(ctx: FlowContext, args):
    """Report the health of every symlink recorded in the link state.

    Per link, prints one of: OK/FOLDED (points where expected; FOLDED
    marks a directory-level link), CHANGED (points elsewhere),
    NOT SYMLINK (a real file occupies the target), BROKEN (nothing
    exists at the target path).
    """
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles.")
        return

    for pkg_name, links in links_by_package.items():
        ctx.console.info(f"[{pkg_name}]")
        for dst_str, link_info in links.items():
            dst = Path(dst_str)

            # Only the {"source": ..., "is_directory_link": ...} entry
            # format is supported; anything else is treated as corrupt.
            if not isinstance(link_info, dict) or "source" not in link_info:
                ctx.console.error(
                    "Unsupported linked state format. Remove linked.json and relink dotfiles."
                )
                sys.exit(1)

            src_str = link_info["source"]
            is_dir_link = bool(link_info.get("is_directory_link", False))

            link_type = "FOLDED" if is_dir_link else "OK"

            if dst.is_symlink():
                target = os.readlink(dst)
                # Accept a literal target match OR both paths resolving to
                # the same file (covers relative/absolute link targets).
                if target == src_str or str(dst.resolve()) == str(Path(src_str).resolve()):
                    print(f"  {link_type}: {dst} -> {src_str}")
                else:
                    print(f"  CHANGED: {dst} -> {target} (expected {src_str})")
            elif dst.exists():
                print(f"  NOT SYMLINK: {dst}")
            else:
                print(f"  BROKEN: {dst} (missing)")
|
||||||
|
|
||||||
|
|
||||||
|
def run_sync(ctx: FlowContext, args):
    """Pull the latest dotfiles from the remote (rebase onto upstream)."""
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    ctx.console.info("Pulling latest dotfiles...")
    pull = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
        capture_output=True, text=True,
    )
    if pull.returncode != 0:
        ctx.console.error(f"Git pull failed: {pull.stderr.strip()}")
        sys.exit(1)

    # Show git's summary (e.g. fast-forward details) when there is one.
    output = pull.stdout.strip()
    if output:
        print(output)
    ctx.console.success("Dotfiles synced.")
|
||||||
|
|
||||||
|
|
||||||
|
def run_relink(ctx: FlowContext, args):
    """Refresh symlinks after changes (unlink + link)."""
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    # Tear everything down, then rebuild with the current package set.
    ctx.console.info("Unlinking current symlinks...")
    run_unlink(ctx, args)

    ctx.console.info("Relinking with updated configuration...")
    run_link(ctx, args)
|
||||||
|
|
||||||
|
|
||||||
|
def run_clean(ctx: FlowContext, args):
    """Remove broken symlinks recorded in the link state.

    "Broken" means the symlink itself exists but its target does not.
    With --dry-run the removals are only printed and the state file is
    left untouched.
    """
    state = _load_state()
    if not state.get("links"):
        ctx.console.info("No linked dotfiles found.")
        return

    removed = 0
    for pkg_name, links in state["links"].items():
        for dst_str in list(links.keys()):
            dst = Path(dst_str)

            # Check if symlink is broken: is_symlink() is true for the
            # link itself, exists() follows it to the (missing) target.
            if dst.is_symlink() and not dst.exists():
                if args.dry_run:
                    print(f"Would remove broken symlink: {dst}")
                else:
                    dst.unlink()
                    print(f"Removed broken symlink: {dst}")
                # NOTE(review): the in-memory entry is dropped and counted
                # in dry-run mode as well; harmless because state is only
                # saved below when not dry-run, but the "Cleaned" summary
                # then reads as if removals happened — confirm intent.
                del links[dst_str]
                removed += 1

    if not args.dry_run:
        _save_state(state)

    if removed > 0:
        ctx.console.success(f"Cleaned {removed} broken symlink(s)")
    else:
        ctx.console.info("No broken symlinks found")
|
||||||
|
|
||||||
|
|
||||||
|
def run_edit(ctx: FlowContext, args):
    """Edit package config with auto-commit workflow.

    Pulls the latest dotfiles, opens the package directory in $EDITOR,
    then — unless --no-commit — stages and commits any resulting changes
    and offers to push them to the remote.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    package_name = args.package

    # Find package directory: common/ wins; otherwise the first matching
    # profile directory in glob order.
    common_dir = DOTFILES_DIR / "common" / package_name
    profile_dirs = list((DOTFILES_DIR / "profiles").glob(f"*/{package_name}"))

    package_dir = None
    if common_dir.exists():
        package_dir = common_dir
    elif profile_dirs:
        package_dir = profile_dirs[0]
    else:
        ctx.console.error(f"Package not found: {package_name}")
        sys.exit(1)

    # Git pull before editing so the commit lands on current history.
    ctx.console.info("Pulling latest changes...")
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        # Non-fatal: editing proceeds; the user can reconcile later.
        ctx.console.warn(f"Git pull failed: {result.stderr.strip()}")

    # Open editor ($EDITOR, falling back to vim). shlex.split allows
    # editors configured with flags, e.g. "code --wait".
    editor = os.environ.get("EDITOR", "vim")
    ctx.console.info(f"Opening {package_dir} in {editor}...")
    edit_result = subprocess.run(shlex.split(editor) + [str(package_dir)])
    if edit_result.returncode != 0:
        ctx.console.warn(f"Editor exited with status {edit_result.returncode}")

    # Check for changes; porcelain output is empty when the tree is clean.
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "status", "--porcelain"],
        capture_output=True, text=True,
    )

    if result.stdout.strip() and not args.no_commit:
        # Auto-commit changes. Note: `add .` stages the whole repo, not
        # just the edited package directory.
        ctx.console.info("Changes detected, committing...")
        subprocess.run(["git", "-C", str(DOTFILES_DIR), "add", "."], check=True)
        subprocess.run(
            ["git", "-C", str(DOTFILES_DIR), "commit", "-m", f"Update {package_name} config"],
            check=True,
        )

        # Ask before pushing; any answer other than "n" means push.
        response = input("Push changes to remote? [Y/n] ")
        if response.lower() != "n":
            subprocess.run(["git", "-C", str(DOTFILES_DIR), "push"], check=True)
            ctx.console.success("Changes committed and pushed")
        else:
            ctx.console.info("Changes committed locally (not pushed)")
    elif result.stdout.strip() and args.no_commit:
        ctx.console.info("Changes detected; skipped commit (--no-commit)")
    else:
        ctx.console.info("No changes to commit")
|
||||||
127
commands/enter.py
Normal file
127
commands/enter.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
"""flow enter — connect to a development instance via SSH."""
|
||||||
|
|
||||||
|
import getpass
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from flow.core.config import FlowContext
|
||||||
|
|
||||||
|
# Default host templates per platform.
# "<namespace>" is substituted with the target namespace when building the
# SSH host (see run()); entries in ctx.config.targets override these
# per namespace/platform pair.
HOST_TEMPLATES = {
    "orb": "<namespace>.orb",
    "utm": "<namespace>.utm.local",
    "core": "<namespace>.core.lan",
}
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Wire up the `enter` command and its flags."""
    parser = subparsers.add_parser("enter", help="Connect to a development instance via SSH")
    parser.add_argument("target", help="Target: [user@]namespace@platform")
    # Individual overrides for the pieces parsed out of `target`.
    parser.add_argument("-u", "--user", help="SSH user (overrides target)")
    parser.add_argument("-n", "--namespace", help="Namespace (overrides target)")
    parser.add_argument("-p", "--platform", help="Platform (overrides target)")
    parser.add_argument("-s", "--session", default="default", help="Tmux session name (default: 'default')")
    parser.add_argument("--no-tmux", action="store_true", help="Skip tmux attachment")
    parser.add_argument("-d", "--dry-run", action="store_true", help="Show command without executing")
    parser.set_defaults(handler=run)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_target(target: str):
|
||||||
|
"""Parse [user@]namespace@platform into (user, namespace, platform)."""
|
||||||
|
user = None
|
||||||
|
namespace = None
|
||||||
|
platform = None
|
||||||
|
|
||||||
|
if "@" in target:
|
||||||
|
platform = target.rsplit("@", 1)[1]
|
||||||
|
rest = target.rsplit("@", 1)[0]
|
||||||
|
else:
|
||||||
|
rest = target
|
||||||
|
|
||||||
|
if "@" in rest:
|
||||||
|
user = rest.rsplit("@", 1)[0]
|
||||||
|
namespace = rest.rsplit("@", 1)[1]
|
||||||
|
else:
|
||||||
|
namespace = rest
|
||||||
|
|
||||||
|
return user, namespace, platform
|
||||||
|
|
||||||
|
|
||||||
|
def _build_destination(user: str, host: str, preserve_host_user: bool = False) -> str:
|
||||||
|
if "@" in host:
|
||||||
|
host_user, host_name = host.rsplit("@", 1)
|
||||||
|
effective_user = host_user if preserve_host_user else (user or host_user)
|
||||||
|
return f"{effective_user}@{host_name}"
|
||||||
|
if not user:
|
||||||
|
return host
|
||||||
|
return f"{user}@{host}"
|
||||||
|
|
||||||
|
|
||||||
|
def run(ctx: FlowContext, args):
    """Resolve the SSH destination for a target and exec into it.

    Replaces the current process with ssh (os.execvp). Unless --no-tmux
    is given, the remote command attaches to (or creates) a tmux session
    carrying DF_NAMESPACE/DF_PLATFORM environment markers.
    """
    # Warn if already inside an instance: the marker vars are the ones
    # set by the tmux session created at the bottom of this function.
    if os.environ.get("DF_NAMESPACE") and os.environ.get("DF_PLATFORM"):
        ns = os.environ["DF_NAMESPACE"]
        plat = os.environ["DF_PLATFORM"]
        ctx.console.error(
            f"Not recommended inside an instance. Currently in: {ns}@{plat}"
        )
        sys.exit(1)

    user, namespace, platform = _parse_target(args.target)

    # Apply overrides — flags beat whatever was parsed from the target.
    if args.user:
        user = args.user
    if args.namespace:
        namespace = args.namespace
    if args.platform:
        platform = args.platform

    # Record whether the user was given explicitly: a merely defaulted
    # user must NOT override a user embedded in a configured ssh host
    # (see _build_destination's preserve_host_user).
    user_was_explicit = bool(user)

    if not user:
        user = os.environ.get("USER") or getpass.getuser()
    if not namespace:
        ctx.console.error("Namespace is required in target")
        sys.exit(1)
    if not platform:
        ctx.console.error("Platform is required in target")
        sys.exit(1)

    # Resolve SSH host from template or config
    host_template = HOST_TEMPLATES.get(platform)
    ssh_identity = None

    # Check config targets for override (first matching entry wins).
    for tc in ctx.config.targets:
        if tc.namespace == namespace and tc.platform == platform:
            host_template = tc.ssh_host
            ssh_identity = tc.ssh_identity
            break

    if not host_template:
        ctx.console.error(f"Unknown platform: {platform}")
        sys.exit(1)

    ssh_host = host_template.replace("<namespace>", namespace)
    destination = _build_destination(user, ssh_host, preserve_host_user=not user_was_explicit)

    # Build SSH command; -tt forces a tty so the remote tmux can run.
    ssh_cmd = ["ssh", "-tt"]
    if ssh_identity:
        ssh_cmd.extend(["-i", os.path.expanduser(ssh_identity)])
    ssh_cmd.append(destination)

    if not args.no_tmux:
        # Attach-or-create the named session, tagging it with the
        # instance markers checked at the top of this function.
        ssh_cmd.extend([
            "tmux", "new-session", "-As", args.session,
            "-e", f"DF_NAMESPACE={namespace}",
            "-e", f"DF_PLATFORM={platform}",
        ])

    if args.dry_run:
        ctx.console.info("Dry run command:")
        print(" " + " ".join(ssh_cmd))
        return

    # Replace this process with ssh; nothing after this line ever runs.
    os.execvp("ssh", ssh_cmd)
|
||||||
181
commands/package.py
Normal file
181
commands/package.py
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
"""flow package — binary package management from manifest definitions."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from typing import Any, Dict, Optional, Tuple
|
||||||
|
|
||||||
|
from flow.core.config import FlowContext
|
||||||
|
from flow.core.paths import INSTALLED_STATE
|
||||||
|
from flow.core.variables import substitute_template
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Wire up the `package` command group and its subcommands."""
    parser = subparsers.add_parser("package", aliases=["pkg"], help="Manage binary packages")
    commands = parser.add_subparsers(dest="package_command")

    # install: resolve + run install scripts from the manifest.
    install_cmd = commands.add_parser("install", help="Install packages from manifest")
    install_cmd.add_argument("packages", nargs="+", help="Package names to install")
    install_cmd.add_argument("--dry-run", action="store_true", help="Show what would be done")
    install_cmd.set_defaults(handler=run_install)

    # list: installed (default) or all manifest entries.
    list_cmd = commands.add_parser("list", help="List installed and available packages")
    list_cmd.add_argument("--all", action="store_true", help="Show all available packages")
    list_cmd.set_defaults(handler=run_list)

    # remove: drop packages from the installed state.
    remove_cmd = commands.add_parser("remove", help="Remove installed packages")
    remove_cmd.add_argument("packages", nargs="+", help="Package names to remove")
    remove_cmd.set_defaults(handler=run_remove)

    # Bare `flow package` just prints usage.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _load_installed() -> dict:
    """Read the installed-packages state file, defaulting to empty."""
    if not INSTALLED_STATE.exists():
        return {}
    with open(INSTALLED_STATE) as fh:
        return json.load(fh)
|
||||||
|
|
||||||
|
|
||||||
|
def _save_installed(state: dict):
    """Write installed-packages state as pretty JSON, creating parents."""
    INSTALLED_STATE.parent.mkdir(parents=True, exist_ok=True)
    with open(INSTALLED_STATE, "w") as fh:
        json.dump(state, fh, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_definitions(ctx: FlowContext) -> dict:
    """Return the manifest's binary package definitions ({} when absent)."""
    definitions = ctx.manifest.get("binaries", {})
    return definitions
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_download_url(
|
||||||
|
pkg_def: Dict[str, Any],
|
||||||
|
platform_str: str,
|
||||||
|
) -> Optional[Tuple[str, Dict[str, str]]]:
|
||||||
|
"""Build GitHub release download URL from package definition."""
|
||||||
|
source = pkg_def.get("source", "")
|
||||||
|
if not source.startswith("github:"):
|
||||||
|
return None
|
||||||
|
|
||||||
|
owner_repo = source[len("github:"):]
|
||||||
|
version = pkg_def.get("version", "")
|
||||||
|
asset_pattern = pkg_def.get("asset-pattern", "")
|
||||||
|
platform_map = pkg_def.get("platform-map", {})
|
||||||
|
|
||||||
|
mapping = platform_map.get(platform_str)
|
||||||
|
if not mapping:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Build template context
|
||||||
|
template_ctx = {**mapping, "version": version}
|
||||||
|
asset = substitute_template(asset_pattern, template_ctx)
|
||||||
|
url = f"https://github.com/{owner_repo}/releases/download/v{version}/{asset}"
|
||||||
|
|
||||||
|
template_ctx["downloadUrl"] = url
|
||||||
|
return url, template_ctx
|
||||||
|
|
||||||
|
|
||||||
|
def run_install(ctx: FlowContext, args):
    """Install the named packages from the manifest's binaries section.

    For each package: resolve its GitHub release URL for the current
    platform, run its install-script with template variables substituted,
    and record it in the installed-state file. Attempts every requested
    package, then exits nonzero if any of them failed.
    """
    definitions = _get_definitions(ctx)
    installed = _load_installed()
    platform_str = ctx.platform.platform
    had_error = False

    for pkg_name in args.packages:
        pkg_def = definitions.get(pkg_name)
        if not pkg_def:
            ctx.console.error(f"Package not found in manifest: {pkg_name}")
            had_error = True
            continue

        ctx.console.info(f"Installing {pkg_name} v{pkg_def.get('version', '?')}...")

        # None means either a non-github source or no platform-map entry.
        result = _resolve_download_url(pkg_def, platform_str)
        if not result:
            ctx.console.error(f"No download available for {pkg_name} on {platform_str}")
            had_error = True
            continue

        url, template_ctx = result

        if args.dry_run:
            ctx.console.info(f"[{pkg_name}] Would download: {url}")
            install_script = pkg_def.get("install-script", "")
            if install_script:
                ctx.console.info(f"[{pkg_name}] Would run install script")
            continue

        # Run install script with template vars resolved
        install_script = pkg_def.get("install-script", "")
        if not install_script:
            ctx.console.error(f"Package '{pkg_name}' has no install-script")
            had_error = True
            continue

        resolved_script = substitute_template(install_script, template_ctx)
        ctx.console.info(f"Running install script for {pkg_name}...")
        # shell=True: install-script is an arbitrary shell snippet from
        # the manifest; output streams straight to the terminal.
        proc = subprocess.run(
            resolved_script, shell=True,
            capture_output=False,
        )
        if proc.returncode != 0:
            ctx.console.error(f"Install script failed for {pkg_name}")
            had_error = True
            continue

        # Record success so `flow package list` / `remove` can see it.
        installed[pkg_name] = {
            "version": pkg_def.get("version", ""),
            "source": pkg_def.get("source", ""),
        }
        ctx.console.success(f"Installed {pkg_name} v{pkg_def.get('version', '')}")

    _save_installed(installed)
    if had_error:
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def run_list(ctx: FlowContext, args):
    """List packages: installed by default, every manifest entry with --all."""
    definitions = _get_definitions(ctx)
    installed = _load_installed()

    headers = ["PACKAGE", "INSTALLED", "AVAILABLE"]
    rows = []

    if args.all:
        # Every package defined in the manifest, installed or not.
        if not definitions:
            ctx.console.info("No packages defined in manifest.")
            return
        for name in sorted(definitions):
            pkg_def = definitions[name]
            installed_version = installed.get(name, {}).get("version", "-")
            rows.append([name, installed_version, pkg_def.get("version", "?")])
    else:
        # Only what is actually recorded as installed.
        if not installed:
            ctx.console.info("No packages installed.")
            return
        for name in sorted(installed):
            info = installed[name]
            available = definitions.get(name, {}).get("version", "?")
            rows.append([name, info.get("version", "?"), available])

    ctx.console.table(headers, rows)
|
||||||
|
|
||||||
|
|
||||||
|
def run_remove(ctx: FlowContext, args):
    """Forget the named packages from the installed state (files stay)."""
    installed = _load_installed()

    for pkg_name in args.packages:
        if pkg_name not in installed:
            ctx.console.warn(f"Package not installed: {pkg_name}")
            continue

        # Drop the bookkeeping entry only; nothing is deleted from disk.
        installed.pop(pkg_name)
        ctx.console.success(f"Removed {pkg_name} from installed packages")
        ctx.console.warn("Note: binary files were not automatically deleted. Remove manually if needed.")

    _save_installed(installed)
|
||||||
199
commands/sync.py
Normal file
199
commands/sync.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
"""flow sync — check git sync status of all projects."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from flow.core.config import FlowContext
|
||||||
|
|
||||||
|
|
||||||
|
def register(subparsers):
    """Wire up the `sync` command group and its subcommands."""
    parser = subparsers.add_parser("sync", help="Git sync tools for projects")
    commands = parser.add_subparsers(dest="sync_command")

    # check: full status report; fetching is on unless --no-fetch.
    check_cmd = commands.add_parser("check", help="Check all projects status")
    check_cmd.add_argument(
        "--fetch",
        dest="fetch",
        action="store_true",
        help="Run git fetch before checking (default)",
    )
    check_cmd.add_argument(
        "--no-fetch",
        dest="fetch",
        action="store_false",
        help="Skip git fetch",
    )
    check_cmd.set_defaults(fetch=True, handler=run_check)

    # fetch: just update all remotes.
    fetch_cmd = commands.add_parser("fetch", help="Fetch all project remotes")
    fetch_cmd.set_defaults(handler=run_fetch)

    # summary: condensed overview.
    summary_cmd = commands.add_parser("summary", help="Quick overview of project status")
    summary_cmd.set_defaults(handler=run_summary)

    # Bare `flow sync` just prints usage.
    parser.set_defaults(handler=lambda ctx, args: parser.print_help())
|
||||||
|
|
||||||
|
|
||||||
|
def _git(repo: str, *cmd, capture: bool = True) -> subprocess.CompletedProcess:
    """Run a git command inside `repo` and return the completed process."""
    argv = ["git", "-C", repo, *cmd]
    return subprocess.run(argv, capture_output=capture, text=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_repo(repo_path: str, do_fetch: bool = True):
    """Check a single repo, return (name, issues list).

    Returns (name, None) for non-git directories. `issues` collects
    human-readable problems: fetch failure, uncommitted/untracked work,
    unpushed commits on the current branch, and other local branches
    that are ahead of (or missing) their upstream.
    """
    name = os.path.basename(repo_path)
    git_dir = os.path.join(repo_path, ".git")
    if not os.path.isdir(git_dir):
        return name, None  # Not a git repo

    issues = []

    if do_fetch:
        fetch_result = _git(repo_path, "fetch", "--all", "--quiet")
        if fetch_result.returncode != 0:
            issues.append("git fetch failed")

    # Current branch; falls back to "HEAD" when rev-parse fails
    # (e.g. detached HEAD).
    result = _git(repo_path, "rev-parse", "--abbrev-ref", "HEAD")
    branch = result.stdout.strip() if result.returncode == 0 else "HEAD"

    # Uncommitted changes: `git diff --quiet` exits nonzero when dirty;
    # check both the working tree and the index.
    diff_result = _git(repo_path, "diff", "--quiet")
    cached_result = _git(repo_path, "diff", "--cached", "--quiet")
    if diff_result.returncode != 0 or cached_result.returncode != 0:
        issues.append("uncommitted changes")
    else:
        # Only report untracked files when the tree is otherwise clean.
        untracked = _git(repo_path, "ls-files", "--others", "--exclude-standard")
        if untracked.stdout.strip():
            issues.append("untracked files")

    # Unpushed commits: branch@{u} only resolves when an upstream is set.
    upstream_check = _git(repo_path, "rev-parse", "--abbrev-ref", f"{branch}@{{u}}")
    if upstream_check.returncode == 0:
        unpushed = _git(repo_path, "rev-list", "--oneline", f"{branch}@{{u}}..{branch}")
        if unpushed.stdout.strip():
            count = len(unpushed.stdout.strip().split("\n"))
            issues.append(f"{count} unpushed commit(s) on {branch}")
    else:
        issues.append(f"no upstream for {branch}")

    # Other local branches: flag any that are ahead of their upstream
    # or have none configured.
    branches_result = _git(repo_path, "for-each-ref", "--format=%(refname:short)", "refs/heads")
    for b in branches_result.stdout.strip().split("\n"):
        if not b or b == branch:
            continue
        up = _git(repo_path, "rev-parse", "--abbrev-ref", f"{b}@{{u}}")
        if up.returncode == 0:
            ahead = _git(repo_path, "rev-list", "--count", f"{b}@{{u}}..{b}")
            if ahead.stdout.strip() != "0":
                issues.append(f"branch {b}: {ahead.stdout.strip()} ahead")
        else:
            issues.append(f"branch {b}: no upstream")

    return name, issues
|
||||||
|
|
||||||
|
|
||||||
|
def run_check(ctx: FlowContext, args):
    """Check every project repository and print a status table.

    Non-git directories are skipped (and reported at the end); repositories
    with issues are collected and summarized after the table. Exits with
    status 1 when the projects directory does not exist.
    """
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)

    table_rows = []
    flagged = []      # repos with at least one reported issue
    skipped = []      # plain directories that are not git repositories
    repo_count = 0

    for entry in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, entry)
        if not os.path.isdir(repo_path):
            continue

        name, issues = _check_repo(repo_path, do_fetch=args.fetch)
        if issues is None:
            # _check_repo signals "not a git repo" with issues=None
            skipped.append(name)
            continue

        repo_count += 1
        if issues:
            flagged.append(name)
            table_rows.append([name, "; ".join(issues)])
        else:
            table_rows.append([name, "clean and synced"])

    if repo_count == 0:
        ctx.console.info("No git repositories found in projects directory.")
        if skipped:
            ctx.console.info(f"Skipped non-git directories: {', '.join(sorted(skipped))}")
        return

    ctx.console.table(["PROJECT", "STATUS"], table_rows)

    if flagged:
        ctx.console.warn(f"Projects needing action: {', '.join(sorted(flagged))}")
    else:
        ctx.console.success("All repositories clean and synced.")

    if skipped:
        ctx.console.info(f"Skipped non-git directories: {', '.join(sorted(skipped))}")
||||||
|
|
||||||
|
def run_fetch(ctx: FlowContext, args):
    """Run `git fetch --all` in every git repository under the projects dir.

    Exits 1 when the projects directory is missing or any fetch fails;
    prints a success message only when every fetch succeeded.
    """
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)

    any_failed = False
    repo_count = 0

    for name in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, name)
        # Only descend into directories that contain a .git directory.
        if not os.path.isdir(os.path.join(repo_path, ".git")):
            continue

        ctx.console.info(f"Fetching {name}...")
        result = _git(repo_path, "fetch", "--all", "--quiet")
        repo_count += 1
        if result.returncode != 0:
            ctx.console.error(f"Failed to fetch {name}")
            any_failed = True

    if repo_count == 0:
        ctx.console.info("No git repositories found in projects directory.")
        return

    if any_failed:
        sys.exit(1)

    ctx.console.success("All remotes fetched.")
|
||||||
|
def run_summary(ctx: FlowContext, args):
    """Print a one-line status for every entry in the projects directory.

    Unlike run_check, non-git directories appear in the table (marked
    "not a git repo") and no fetch is performed before checking.
    """
    projects_dir = os.path.expanduser(ctx.config.projects_dir)
    if not os.path.isdir(projects_dir):
        ctx.console.error(f"Projects directory not found: {projects_dir}")
        sys.exit(1)

    headers = ["PROJECT", "STATUS"]
    rows = []

    for entry in sorted(os.listdir(projects_dir)):
        repo_path = os.path.join(projects_dir, entry)
        if not os.path.isdir(repo_path):
            continue

        name, issues = _check_repo(repo_path, do_fetch=False)
        if issues is None:
            status = "not a git repo"
        elif issues:
            status = "; ".join(issues)
        else:
            status = "clean"
        rows.append([name, status])

    if not rows:
        ctx.console.info("No projects found.")
        return

    ctx.console.table(headers, rows)
0
core/__init__.py
Normal file
0
core/__init__.py
Normal file
BIN
core/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
core/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/action.cpython-313.pyc
Normal file
BIN
core/__pycache__/action.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/config.cpython-313.pyc
Normal file
BIN
core/__pycache__/config.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/console.cpython-313.pyc
Normal file
BIN
core/__pycache__/console.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/paths.cpython-313.pyc
Normal file
BIN
core/__pycache__/paths.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/platform.cpython-313.pyc
Normal file
BIN
core/__pycache__/platform.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/process.cpython-313.pyc
Normal file
BIN
core/__pycache__/process.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/stow.cpython-313.pyc
Normal file
BIN
core/__pycache__/stow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
core/__pycache__/variables.cpython-313.pyc
Normal file
BIN
core/__pycache__/variables.cpython-313.pyc
Normal file
Binary file not shown.
120
core/action.py
Normal file
120
core/action.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
"""Action dataclass and ActionExecutor for plan-then-execute workflows."""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Action:
    """A single planned step in a plan-then-execute workflow.

    Instances start as "pending"; the executor updates `status` (and
    `error` on failure) as the plan runs.
    """

    type: str                        # handler key; text before the first "-" is the plan category
    description: str                 # human-readable summary shown in plans and step output
    data: Dict[str, Any] = field(default_factory=dict)  # payload passed to the handler
    skip_on_error: bool = True       # False marks the action critical (aborts the run on failure)
    os_filter: Optional[str] = None  # when set, only run if it matches the current OS
    status: str = "pending"          # "pending" | "completed" | "skipped" | "failed"
    error: Optional[str] = None      # failure message captured by the executor
|
||||||
|
class ActionExecutor:
    """Register handlers for action types, then execute a plan."""

    def __init__(self, console: ConsoleLogger):
        self.console = console
        self._handlers: Dict[str, Callable] = {}
        # Notes accumulated by handlers, echoed in the final summary.
        self.post_comments: List[str] = []

    def register(self, action_type: str, handler: Callable) -> None:
        """Map an action type to the callable that performs it."""
        self._handlers[action_type] = handler

    def execute(self, actions: List[Action], *, dry_run: bool = False, current_os: str = "") -> None:
        """Run the plan (or, with dry_run=True, only print it).

        Actions whose os_filter does not match current_os are dropped
        before execution. A failing critical action (skip_on_error=False)
        aborts the rest of the run.
        """
        if dry_run:
            self._print_plan(actions)
            return

        runnable = [a for a in actions if a.os_filter in (None, current_os)]
        dropped = len(actions) - len(runnable)
        if dropped:
            self.console.info(f"Skipped {dropped} OS-incompatible actions")

        self.console.section_header(f"EXECUTING {len(runnable)} ACTIONS")

        total = len(runnable)
        for number, action in enumerate(runnable, 1):
            self.console.step_start(number, total, action.description)

            handler = self._handlers.get(action.type)
            if handler is None:
                action.status = "skipped"
                self.console.step_skip(f"No handler for action type: {action.type}")
                continue

            try:
                handler(action.data)
                action.status = "completed"
                self.console.step_complete()
            except Exception as exc:
                action.error = str(exc)
                if action.skip_on_error:
                    action.status = "skipped"
                    self.console.step_skip(str(exc))
                else:
                    action.status = "failed"
                    self.console.step_fail(str(exc))
                    print(f"\n{self.console.RED}Critical action failed, stopping execution{self.console.RESET}")
                    break

        self._print_summary(runnable)

    def _print_plan(self, actions: List[Action]) -> None:
        """Show the plan grouped by category without executing anything."""
        self.console.plan_header("EXECUTION PLAN", len(actions))

        # Group actions by the prefix of their type (text before the first "-").
        by_category: Dict[str, List[Action]] = {}
        for action in actions:
            by_category.setdefault(action.type.split("-")[0], []).append(action)

        for category, members in by_category.items():
            self.console.plan_category(category)
            for number, action in enumerate(members, 1):
                self.console.plan_item(
                    number,
                    action.description,
                    action.os_filter,
                    not action.skip_on_error,
                )

        self.console.plan_legend()

    def _print_summary(self, actions: List[Action]) -> None:
        """Print totals, post-install notes, and details of any failures."""
        tally = {"completed": 0, "failed": 0, "skipped": 0}
        for action in actions:
            if action.status in tally:
                tally[action.status] += 1

        self.console.section_summary("EXECUTION SUMMARY")
        c = self.console

        print(f"Total actions: {c.BOLD}{len(actions)}{c.RESET}")
        print(f"Completed: {c.GREEN}{tally['completed']}{c.RESET}")
        if tally["failed"]:
            print(f"Failed: {c.RED}{tally['failed']}{c.RESET}")
        if tally["skipped"]:
            print(f"Skipped: {c.YELLOW}{tally['skipped']}{c.RESET}")

        if self.post_comments:
            print(f"\n{c.BOLD}POST-INSTALL NOTES{c.RESET}")
            print(f"{c.CYAN}{'-' * 25}{c.RESET}")
            for i, comment in enumerate(self.post_comments, 1):
                print(f"{i}. {comment}")

        if tally["failed"]:
            print(f"\n{c.BOLD}FAILED ACTIONS{c.RESET}")
            print(f"{c.RED}{'-' * 20}{c.RESET}")
            for action in actions:
                if action.status == "failed":
                    print(f"{c.RED}>{c.RESET} {action.description}")
                    print(f"  {c.GRAY}Error: {action.error}{c.RESET}")
            print(f"\n{c.RED}{tally['failed']} action(s) failed. Check the errors above.{c.RESET}")
        else:
            print(f"\n{c.GREEN}All actions completed successfully!{c.RESET}")
||||||
151
core/config.py
Normal file
151
core/config.py
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
"""Configuration loading (INI config + YAML manifest) and FlowContext."""
|
||||||
|
|
||||||
|
import configparser
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
from flow.core import paths
|
||||||
|
from flow.core.platform import PlatformInfo
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class TargetConfig:
    """One remote target parsed from the [targets] section of the config."""

    namespace: str                      # logical name used to identify the target
    platform: str                       # platform key this target applies to
    ssh_host: str                       # SSH host string for connecting
    ssh_identity: Optional[str] = None  # optional SSH identity file
||||||
|
@dataclass
class AppConfig:
    """Top-level application configuration with built-in defaults.

    Values are overridden by load_config() when a config file exists.
    """

    dotfiles_url: str = ""                               # git URL of the dotfiles repo
    dotfiles_branch: str = "main"                        # branch to track
    projects_dir: str = "~/projects"                     # where project checkouts live
    container_registry: str = "registry.tomastm.com"     # default image registry
    container_tag: str = "latest"                        # default image tag
    tmux_session: str = "default"                        # default tmux session name
    targets: List[TargetConfig] = field(default_factory=list)  # parsed [targets] entries
||||||
|
def _parse_target_config(key: str, value: str) -> Optional[TargetConfig]:
    """Parse a target line from config into a TargetConfig.

    Supported formats:
      1) namespace = platform ssh_host [ssh_identity]
      2) namespace@platform = ssh_host [ssh_identity]

    Returns None for malformed entries instead of raising.
    """
    fields = value.split()
    if not fields:
        return None

    if "@" in key:
        # Format 2: platform is embedded in the key.
        namespace, _, platform = key.partition("@")
        if not namespace or not platform:
            return None
        return TargetConfig(
            namespace=namespace,
            platform=platform,
            ssh_host=fields[0],
            ssh_identity=fields[1] if len(fields) > 1 else None,
        )

    # Format 1: platform and host come from the value.
    if len(fields) < 2:
        return None
    return TargetConfig(
        namespace=key,
        platform=fields[0],
        ssh_host=fields[1],
        ssh_identity=fields[2] if len(fields) > 2 else None,
    )
|
|
||||||
|
def load_config(path: Optional[Path] = None) -> AppConfig:
    """Load the INI config file into an AppConfig with cascading priority.

    Priority when no explicit path is given:
      1. Dotfiles repo (self-hosted): ~/.local/share/devflow/dotfiles/flow/.config/flow/config
      2. Local override: ~/.config/devflow/config
      3. Built-in defaults when neither file exists
    """
    cfg = AppConfig()

    if path is None:
        # Prefer the self-hosted config shipped inside the dotfiles repo.
        path = paths.DOTFILES_CONFIG if paths.DOTFILES_CONFIG.exists() else paths.CONFIG_FILE

    if not path.exists():
        return cfg

    parser = configparser.ConfigParser()
    parser.read(path)

    if parser.has_section("repository"):
        cfg.dotfiles_url = parser.get("repository", "dotfiles_url", fallback=cfg.dotfiles_url)
        cfg.dotfiles_branch = parser.get("repository", "dotfiles_branch", fallback=cfg.dotfiles_branch)

    if parser.has_section("paths"):
        cfg.projects_dir = parser.get("paths", "projects_dir", fallback=cfg.projects_dir)

    if parser.has_section("defaults"):
        cfg.container_registry = parser.get("defaults", "container_registry", fallback=cfg.container_registry)
        cfg.container_tag = parser.get("defaults", "container_tag", fallback=cfg.container_tag)
        cfg.tmux_session = parser.get("defaults", "tmux_session", fallback=cfg.tmux_session)

    if parser.has_section("targets"):
        for key in parser.options("targets"):
            # Malformed entries parse to None and are silently dropped.
            target = _parse_target_config(key, parser.get("targets", key))
            if target is not None:
                cfg.targets.append(target)

    return cfg
|
|
||||||
|
def load_manifest(path: Optional[Path] = None) -> Dict[str, Any]:
    """Load the YAML manifest file with cascading priority.

    Priority when no explicit path is given:
      1. Dotfiles repo (self-hosted): ~/.local/share/devflow/dotfiles/flow/.config/flow/manifest.yaml
      2. Local override: ~/.config/devflow/manifest.yaml
      3. Empty dict when neither file exists

    Raises:
        RuntimeError: when the file exists but contains invalid YAML.
    """
    if path is None:
        # Prefer the self-hosted manifest shipped inside the dotfiles repo.
        path = paths.DOTFILES_MANIFEST if paths.DOTFILES_MANIFEST.exists() else paths.MANIFEST_FILE

    if not path.exists():
        return {}

    try:
        with open(path, "r") as handle:
            loaded = yaml.safe_load(handle)
    except yaml.YAMLError as exc:
        raise RuntimeError(f"Invalid YAML in {path}: {exc}") from exc

    # safe_load yields None for empty files (and scalars for odd content);
    # normalize anything that is not a mapping to an empty dict.
    return loaded if isinstance(loaded, dict) else {}
|
||||||
|
@dataclass
class FlowContext:
    """Bundle of runtime services handed to command implementations."""

    config: AppConfig          # parsed INI configuration
    manifest: Dict[str, Any]   # parsed YAML manifest (may be empty)
    platform: PlatformInfo     # detected OS/architecture
    console: ConsoleLogger     # shared output formatter
||||||
138
core/console.py
Normal file
138
core/console.py
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
"""Console output formatting — ported from dotfiles_v2/src/console_logger.py."""
|
||||||
|
|
||||||
|
import time
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class ConsoleLogger:
    """Formats all terminal output: log levels, step progress, plans, tables."""

    # ANSI color / style escape codes
    BLUE = "\033[34m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    RED = "\033[31m"
    CYAN = "\033[36m"
    GRAY = "\033[90m"
    DARK_GRAY = "\033[2;37m"
    BOLD = "\033[1m"
    DIM = "\033[2m"
    RESET = "\033[0m"

    # Unicode box-drawing characters
    BOX_VERTICAL = "\u2502"
    BOX_HORIZONTAL = "\u2500"
    BOX_TOP_LEFT = "\u250c"
    BOX_TOP_RIGHT = "\u2510"
    BOX_BOTTOM_LEFT = "\u2514"
    BOX_BOTTOM_RIGHT = "\u2518"

    def __init__(self):
        self.step_counter = 0   # counter attribute; not advanced by step_start itself
        self.start_time = None  # wall-clock start of the current step (set by step_start)

    # -- simple leveled messages ------------------------------------------

    def info(self, message: str):
        print(f"{self.CYAN}[INFO]{self.RESET} {message}")

    def warn(self, message: str):
        print(f"{self.YELLOW}[WARN]{self.RESET} {message}")

    def error(self, message: str):
        print(f"{self.RED}[ERROR]{self.RESET} {message}")

    def success(self, message: str):
        print(f"{self.GREEN}[SUCCESS]{self.RESET} {message}")

    # -- step lifecycle ----------------------------------------------------

    def step_start(self, current: int, total: int, description: str):
        """Announce step `current` of `total` and start its timer."""
        print(
            f"\n{self.BOLD}{self.BLUE}Step {current}/{total}:{self.RESET} "
            f"{self.BOLD}{description}{self.RESET}"
        )
        print(f"{self.BLUE}{self.BOX_HORIZONTAL * 4}{self.RESET} {self.GRAY}Starting...{self.RESET}")
        self.start_time = time.time()

    def step_command(self, command: str):
        """Echo the shell command being run inside the current step."""
        print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.GRAY}$ {command}{self.RESET}")

    def step_output(self, line: str):
        """Echo one line of command output; blank lines are suppressed."""
        if line.strip():
            print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.DARK_GRAY} {line.rstrip()}{self.RESET}")

    def _elapsed(self) -> float:
        """Seconds since the current step started (0 when no step is active)."""
        return time.time() - self.start_time if self.start_time else 0

    def step_complete(self, message: str = "Completed successfully"):
        print(f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}{self.GREEN}> {message} ({self._elapsed():.1f}s){self.RESET}")

    def step_skip(self, message: str):
        print(
            f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}"
            f"{self.YELLOW}> Skipped: {message} ({self._elapsed():.1f}s){self.RESET}"
        )

    def step_fail(self, message: str):
        print(
            f"{self.BLUE}{self.BOX_VERTICAL} {self.RESET}"
            f"{self.RED}> Failed: {message} ({self._elapsed():.1f}s){self.RESET}"
        )

    # -- banners -----------------------------------------------------------

    def _banner(self, color: str, text: str):
        """Print `text` between two full-width colored rules."""
        rule = f"{self.BOLD}{color}{'=' * 70}{self.RESET}"
        print(f"\n{rule}")
        print(f"{self.BOLD}{color}{text}{self.RESET}")
        print(rule)

    def section_header(self, title: str, subtitle: str = ""):
        heading = f" {title.upper()} - {subtitle}" if subtitle else f" {title.upper()}"
        self._banner(self.BLUE, heading)

    def section_summary(self, title: str):
        self._banner(self.GREEN, f" {title.upper()}")

    def plan_header(self, title: str, count: int):
        self._banner(self.CYAN, f" {title.upper()} ({count} actions)")

    # -- plan rendering ----------------------------------------------------

    def plan_category(self, category: str):
        print(f"\n{self.BOLD}{self.CYAN}{category.upper()}{self.RESET}")
        print(f"{self.CYAN}{'-' * 20}{self.RESET}")

    def plan_item(self, number: int, description: str, os_filter: Optional[str] = None, critical: bool = False):
        os_indicator = f" {self.GRAY}({os_filter}){self.RESET}" if os_filter else ""
        error_indicator = f" {self.RED}(critical){self.RESET}" if critical else ""
        print(f" {number:2d}. {description}{os_indicator}{error_indicator}")

    def plan_legend(self):
        print(
            f"\n{self.GRAY}Legend: {self.RED}(critical){self.GRAY} = stops on failure, "
            f"{self.GRAY}(os){self.GRAY} = OS-specific{self.RESET}"
        )

    # -- tables ------------------------------------------------------------

    def table(self, headers: list[str], rows: list[list[str]]):
        """Print a left-aligned text table; prints nothing when rows is empty."""
        if not rows:
            return

        head = [str(h) for h in headers]
        body = [[str(cell) for cell in row] for row in rows]

        # Each column is as wide as the widest of its header and cells.
        widths = [len(h) for h in head]
        for row in body:
            for col, cell in enumerate(row):
                if col < len(widths):
                    widths[col] = max(widths[col], len(cell))

        print(" ".join(f"{self.BOLD}{h:<{widths[i]}}{self.RESET}" for i, h in enumerate(head)))
        print(self.GRAY + " ".join("-" * w for w in widths) + self.RESET)
        for row in body:
            print(" ".join(f"{cell:<{widths[i]}}" for i, cell in enumerate(row)))
||||||
37
core/paths.py
Normal file
37
core/paths.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
"""XDG-compliant path constants for DevFlow."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def _xdg(env_var: str, fallback: str) -> Path:
|
||||||
|
return Path(os.environ.get(env_var, fallback))
|
||||||
|
|
||||||
|
|
||||||
|
HOME = Path.home()
|
||||||
|
|
||||||
|
CONFIG_DIR = _xdg("XDG_CONFIG_HOME", str(HOME / ".config")) / "devflow"
|
||||||
|
DATA_DIR = _xdg("XDG_DATA_HOME", str(HOME / ".local" / "share")) / "devflow"
|
||||||
|
STATE_DIR = _xdg("XDG_STATE_HOME", str(HOME / ".local" / "state")) / "devflow"
|
||||||
|
|
||||||
|
MANIFEST_FILE = CONFIG_DIR / "manifest.yaml"
|
||||||
|
CONFIG_FILE = CONFIG_DIR / "config"
|
||||||
|
|
||||||
|
DOTFILES_DIR = DATA_DIR / "dotfiles"
|
||||||
|
PACKAGES_DIR = DATA_DIR / "packages"
|
||||||
|
SCRATCH_DIR = DATA_DIR / "scratch"
|
||||||
|
PROJECTS_DIR = HOME / "projects"
|
||||||
|
|
||||||
|
LINKED_STATE = STATE_DIR / "linked.json"
|
||||||
|
INSTALLED_STATE = STATE_DIR / "installed.json"
|
||||||
|
|
||||||
|
# Self-hosted flow config paths (from dotfiles repo)
|
||||||
|
DOTFILES_FLOW_CONFIG = DOTFILES_DIR / "flow" / ".config" / "flow"
|
||||||
|
DOTFILES_MANIFEST = DOTFILES_FLOW_CONFIG / "manifest.yaml"
|
||||||
|
DOTFILES_CONFIG = DOTFILES_FLOW_CONFIG / "config"
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_dirs() -> None:
|
||||||
|
"""Create all required directories if they don't exist."""
|
||||||
|
for d in (CONFIG_DIR, DATA_DIR, STATE_DIR, PACKAGES_DIR, SCRATCH_DIR):
|
||||||
|
d.mkdir(parents=True, exist_ok=True)
|
||||||
43
core/platform.py
Normal file
43
core/platform.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
"""OS and architecture detection."""
|
||||||
|
|
||||||
|
import platform as _platform
|
||||||
|
import shutil
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class PlatformInfo:
|
||||||
|
os: str = "linux" # "linux" or "macos"
|
||||||
|
arch: str = "amd64" # "amd64" or "arm64"
|
||||||
|
platform: str = "" # "linux-amd64", etc.
|
||||||
|
|
||||||
|
def __post_init__(self):
|
||||||
|
if not self.platform:
|
||||||
|
self.platform = f"{self.os}-{self.arch}"
|
||||||
|
|
||||||
|
|
||||||
|
_OS_MAP = {"Darwin": "macos", "Linux": "linux"}
|
||||||
|
_ARCH_MAP = {"x86_64": "amd64", "aarch64": "arm64", "arm64": "arm64"}
|
||||||
|
|
||||||
|
|
||||||
|
def detect_platform() -> PlatformInfo:
|
||||||
|
raw_os = _platform.system()
|
||||||
|
os_name = _OS_MAP.get(raw_os)
|
||||||
|
if os_name is None:
|
||||||
|
raise RuntimeError(f"Unsupported operating system: {raw_os}")
|
||||||
|
|
||||||
|
raw_arch = _platform.machine().lower()
|
||||||
|
arch = _ARCH_MAP.get(raw_arch)
|
||||||
|
if arch is None:
|
||||||
|
raise RuntimeError(f"Unsupported architecture: {raw_arch}")
|
||||||
|
|
||||||
|
return PlatformInfo(os=os_name, arch=arch, platform=f"{os_name}-{arch}")
|
||||||
|
|
||||||
|
|
||||||
|
def detect_container_runtime() -> Optional[str]:
|
||||||
|
"""Return 'docker' or 'podman' if available, else None."""
|
||||||
|
for runtime in ("docker", "podman"):
|
||||||
|
if shutil.which(runtime):
|
||||||
|
return runtime
|
||||||
|
return None
|
||||||
45
core/process.py
Normal file
45
core/process.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
"""Command execution with streaming output."""
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
|
||||||
|
|
||||||
|
def run_command(
    command: str,
    console: ConsoleLogger,
    *,
    check: bool = True,
    shell: bool = True,
    capture: bool = False,
) -> subprocess.CompletedProcess:
    """Run a command with real-time streamed output.

    Args:
        command: Command string (or program name when shell=False).
        console: Logger used to echo the command and each output line.
        check: Raise RuntimeError on a non-zero exit status.
        shell: Pass the command through the shell (default True).
        capture: When True, suppress per-line console echoing; the output
            is still collected into the returned CompletedProcess.

    Returns:
        CompletedProcess with stdout set to the collected output
        (newline-joined, blank lines dropped) and stderr always "".

    Raises:
        RuntimeError: when check=True and the command exits non-zero.
    """
    console.step_command(command)

    output_lines = []
    # Use Popen as a context manager so the stdout pipe is closed
    # deterministically instead of leaking until garbage collection.
    with subprocess.Popen(
        command,
        shell=shell,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # merge stderr into the streamed stdout
        universal_newlines=True,
        bufsize=1,
    ) as process:
        for line in process.stdout:
            line = line.rstrip()
            if line:
                if not capture:
                    console.step_output(line)
                output_lines.append(line)

        process.wait()

    if check and process.returncode != 0:
        raise RuntimeError(
            f"Command failed (exit {process.returncode}): {command}"
        )

    return subprocess.CompletedProcess(
        command, process.returncode, stdout="\n".join(output_lines), stderr=""
    )
||||||
358
core/stow.py
Normal file
358
core/stow.py
Normal file
@@ -0,0 +1,358 @@
|
|||||||
|
"""GNU Stow-style tree folding/unfolding for efficient symlink management."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Optional, Set
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class LinkOperation:
    """Represents a single operation to perform during linking."""

    type: str  # "create_symlink" | "create_dir" | "unfold" | "remove" | "remove_dir"
    source: Path   # source path inside the dotfiles tree
    target: Path   # path where the operation takes effect
    package: str   # owning package name
    is_directory_link: bool = False  # True when the symlink points at a directory

    def __str__(self) -> str:
        """Human-readable one-line rendering used when printing plans."""
        if self.type == "create_symlink":
            kind = "DIR" if self.is_directory_link else "FILE"
            return f"  {kind} LINK: {self.target} -> {self.source}"
        if self.type == "create_dir":
            return f"  CREATE DIR: {self.target}"
        if self.type == "unfold":
            return f"  UNFOLD: {self.target} (directory symlink -> individual file symlinks)"
        if self.type == "remove":
            return f"  REMOVE: {self.target}"
        if self.type == "remove_dir":
            return f"  REMOVE DIR: {self.target}"
        # Unknown operation types fall back to a generic rendering.
        return f"  {self.type}: {self.target}"
|
|
||||||
|
|
||||||
|
@dataclass
class LinkTree:
    """Represents the current state of symlinks."""

    links: Dict[Path, Path] = field(default_factory=dict)      # target -> source
    packages: Dict[Path, str] = field(default_factory=dict)    # target -> package name
    directory_links: Set[Path] = field(default_factory=set)    # targets that are directory symlinks

    def add_link(self, target: Path, source: Path, package: str, is_dir_link: bool = False):
        """Record a link (and its owning package) in the tree."""
        self.links[target] = source
        self.packages[target] = package
        if is_dir_link:
            self.directory_links.add(target)

    def remove_link(self, target: Path):
        """Forget a link; silently ignores unknown targets."""
        self.links.pop(target, None)
        self.packages.pop(target, None)
        self.directory_links.discard(target)

    def is_directory_link(self, target: Path) -> bool:
        """True when the target is recorded as a directory symlink."""
        return target in self.directory_links

    def get_package(self, target: Path) -> Optional[str]:
        """Name of the package that owns a link, or None if untracked."""
        return self.packages.get(target)

    def can_fold(self, target_dir: Path, package: str) -> bool:
        """Check whether every link directly inside target_dir belongs to `package`.

        True means a single directory symlink can replace the individual
        file symlinks. Only direct children matter; a child's parent being
        target_dir already implies it lies under target_dir.
        """
        for link_target, owner in self.packages.items():
            if link_target.parent == target_dir and owner != package:
                return False
        return True

    @classmethod
    def from_state(cls, state: dict) -> "LinkTree":
        """Build a LinkTree from the linked.json state format (v2 only)."""
        tree = cls()
        for package_name, pkg_links in state.get("links", {}).items():
            for target_str, info in pkg_links.items():
                if not isinstance(info, dict) or "source" not in info:
                    raise RuntimeError(
                        "Unsupported linked state format. Remove linked.json and relink dotfiles."
                    )
                tree.add_link(
                    Path(target_str),
                    Path(info["source"]),
                    package_name,
                    bool(info.get("is_directory_link", False)),
                )
        return tree

    def to_state(self) -> dict:
        """Convert LinkTree to the linked.json state format."""
        grouped: Dict[str, Dict[str, dict]] = {}
        for target, source in self.links.items():
            entry = grouped.setdefault(self.packages[target], {})
            entry[str(target)] = {
                "source": str(source),
                "is_directory_link": target in self.directory_links,
            }
        return {"version": 2, "links": grouped}
|
||||||
|
|
||||||
|
class TreeFolder:
    """Implements GNU Stow tree folding/unfolding algorithm.

    Every public method only *plans* work as a list of LinkOperation
    objects; nothing touches the filesystem until execute_operations()
    runs the plan.  This lets callers validate a plan with
    detect_conflicts() or dry-run it first.
    """

    def __init__(self, tree: LinkTree):
        # In-memory view of the currently managed symlinks; kept in
        # sync with the filesystem by execute_operations().
        self.tree = tree

    def plan_link(
        self, source: Path, target: Path, package: str, is_dir_link: bool = False
    ) -> List[LinkOperation]:
        """Plan operations needed to create a link (may include unfolding).

        Args:
            source: Source path (file or directory in dotfiles)
            target: Target path (where symlink should be created)
            package: Package name
            is_dir_link: Whether this is a directory symlink (folded)

        Returns a list of operations to execute in order.
        """
        operations: List[LinkOperation] = []

        # If the parent is itself a folded directory symlink it must be
        # unfolded first — otherwise the new entry would be created
        # inside another package's source tree.
        parent = target.parent
        if parent in self.tree.links and self.tree.is_directory_link(parent):
            operations.extend(self._plan_unfold(parent))

        # Pre-existing targets are reported by detect_conflicts();
        # here we simply emit the create operation.
        operations.append(
            LinkOperation(
                type="create_symlink",
                source=source,
                target=target,
                package=package,
                is_directory_link=is_dir_link,
            )
        )

        return operations

    def _find_fold_point(
        self, source: Path, target: Path, package: str
    ) -> Path:
        """Find the deepest directory level where we can create a folder symlink.

        Returns the target path where the symlink should be created.
        For single files, this should just return the file path (no folding).
        Folding only makes sense when linking entire directories, which is
        decided at a higher level — so automatic folding is currently
        disabled here.  The parameters are retained for interface stability.
        """
        return target

    def _plan_unfold(self, folded_dir: Path) -> List[LinkOperation]:
        """Plan operations to unfold a directory symlink.

        When unfolding:
        1. Remove the directory symlink
        2. Create a real directory
        3. Create individual file symlinks for all files
        """
        operations: List[LinkOperation] = []

        # Resolve the source of the folded directory; nothing to do if
        # the path is not actually managed.
        source_dir = self.tree.links.get(folded_dir)
        if not source_dir:
            return operations

        package = self.tree.packages.get(folded_dir, "")

        # 1. Remove the directory symlink.
        operations.append(
            LinkOperation(
                type="remove",
                source=source_dir,
                target=folded_dir,
                package=package,
                is_directory_link=True,
            )
        )

        # 2. Replace it with a real directory.
        operations.append(
            LinkOperation(
                type="create_dir",
                source=source_dir,
                target=folded_dir,
                package=package,
            )
        )

        # 3. Re-create every file under the old source as an individual
        #    file symlink, mirroring the relative layout.
        if source_dir.exists() and source_dir.is_dir():
            for root, _dirs, files in os.walk(source_dir):
                for fname in files:
                    src_file = Path(root) / fname
                    dst_file = folded_dir / src_file.relative_to(source_dir)
                    operations.append(
                        LinkOperation(
                            type="create_symlink",
                            source=src_file,
                            target=dst_file,
                            package=package,
                            is_directory_link=False,
                        )
                    )

        return operations

    def plan_unlink(self, target: Path, package: str) -> List[LinkOperation]:
        """Plan operations to remove a link (may include refolding)."""
        operations: List[LinkOperation] = []

        if self.tree.is_directory_link(target):
            # Remove all file links nested under this directory link.
            # NOTE: Path.relative_to() succeeds for the path itself, so
            # `target` is explicitly skipped here — previously it was
            # emitted twice, once mislabeled with
            # is_directory_link=False.  It is removed exactly once,
            # with the correct flag, below.
            to_remove = []
            for link_target in self.tree.links.keys():
                if link_target == target:
                    continue
                try:
                    link_target.relative_to(target)
                except ValueError:
                    # Not under `target`; skip.
                    continue
                to_remove.append(link_target)

            for link_target in to_remove:
                operations.append(
                    LinkOperation(
                        type="remove",
                        source=self.tree.links[link_target],
                        target=link_target,
                        package=self.tree.packages[link_target],
                        is_directory_link=False,
                    )
                )

        # Remove the link itself.
        if target in self.tree.links:
            operations.append(
                LinkOperation(
                    type="remove",
                    source=self.tree.links[target],
                    target=target,
                    package=package,
                    is_directory_link=self.tree.is_directory_link(target),
                )
            )

        return operations

    def detect_conflicts(self, operations: List[LinkOperation]) -> List[str]:
        """Detect conflicts before executing operations.

        Returns a list of conflict error messages (empty when the plan
        is safe to execute).
        """
        conflicts = []

        for op in operations:
            if op.type == "create_symlink":
                if op.target in self.tree.links:
                    # Already managed by flow — only a conflict when a
                    # *different* package owns the target.
                    existing_pkg = self.tree.packages[op.target]
                    if existing_pkg != op.package:
                        conflicts.append(
                            f"Conflict: {op.target} is already linked by package '{existing_pkg}'"
                        )
                # Unmanaged file or symlink already occupies the target.
                elif op.target.exists() or op.target.is_symlink():
                    conflicts.append(
                        f"Conflict: {op.target} already exists and is not managed by flow"
                    )

                # A regular file in the parent position makes the target
                # impossible to create.
                if op.target.parent.exists() and op.target.parent.is_file():
                    conflicts.append(
                        f"Conflict: {op.target.parent} is a file, cannot create {op.target}"
                    )

        return conflicts

    def execute_operations(
        self, operations: List[LinkOperation], dry_run: bool = False
    ) -> None:
        """Execute a list of operations in order, updating the tree as we go.

        Execution is *not* transactional: a failure partway through
        leaves earlier operations applied.  Run detect_conflicts()
        first to catch predictable failures.

        If dry_run is True, only print what would be done.
        """
        if dry_run:
            for op in operations:
                print(str(op))
            return

        for op in operations:
            if op.type == "create_symlink":
                # Ensure the parent directory exists.
                op.target.parent.mkdir(parents=True, exist_ok=True)

                if op.target.is_symlink():
                    # Idempotency: adopt an existing symlink that already
                    # points at the desired source instead of recreating it.
                    current = op.target.resolve(strict=False)
                    desired = op.source.resolve(strict=False)
                    if current == desired:
                        self.tree.add_link(op.target, op.source, op.package, op.is_directory_link)
                        continue
                    op.target.unlink()
                elif op.target.exists():
                    if op.target.is_file():
                        op.target.unlink()
                    else:
                        raise RuntimeError(f"Cannot overwrite directory: {op.target}")

                # Create symlink, then record it in the tree.
                op.target.symlink_to(op.source)
                self.tree.add_link(op.target, op.source, op.package, op.is_directory_link)

            elif op.type == "create_dir":
                op.target.mkdir(parents=True, exist_ok=True)

            elif op.type == "remove":
                # exists() is False for broken symlinks, hence the extra
                # is_symlink() check.
                if op.target.exists() or op.target.is_symlink():
                    op.target.unlink()
                self.tree.remove_link(op.target)

            elif op.type == "remove_dir":
                if op.target.exists() and op.target.is_dir():
                    op.target.rmdir()

    def to_state(self) -> dict:
        """Convert current tree to state format for persistence."""
        return self.tree.to_state()
|
||||||
38
core/variables.py
Normal file
38
core/variables.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""Variable substitution for $VAR/${VAR} and {{var}} templates."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
def substitute(text: str, variables: Dict[str, str]) -> str:
    """Replace $VAR and ${VAR} with values from variables dict or env.

    Lookup order: the explicit ``variables`` mapping first, then a
    built-in ``HOME`` fallback (``Path.home()``), then ``os.environ``.
    Unresolved references are left untouched.  Non-string input is
    returned unchanged.
    """
    if not isinstance(text, str):
        return text

    def _lookup(match: re.Match[str]) -> str:
        # Group 1 matches $VAR, group 2 matches ${VAR}.
        name = match.group(1) or match.group(2) or ""
        if name in variables:
            return str(variables[name])
        if name == "HOME":
            return str(Path.home())
        # Fall back to the environment; keep the original text if unset.
        return os.environ.get(name, match.group(0))

    return re.sub(r"\$(\w+)|\$\{([^}]+)\}", _lookup, text)
|
||||||
|
|
||||||
|
|
||||||
|
def substitute_template(text: str, context: Dict[str, str]) -> str:
    """Replace {{key}} placeholders with values from context dict.

    Placeholders whose key is missing from ``context`` are left as-is;
    non-string input is returned unchanged.
    """
    if not isinstance(text, str):
        return text

    return re.sub(
        r"\{\{(\w+)\}\}",
        lambda m: context.get(m.group(1).strip(), m.group(0)),
        text,
    )
|
||||||
19
pyproject.toml
Normal file
19
pyproject.toml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["hatchling"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "flow"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "DevFlow - A unified toolkit for managing development instances, containers, and profiles"
|
||||||
|
requires-python = ">=3.9"
|
||||||
|
dependencies = ["pyyaml>=6.0"]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
build = ["pyinstaller>=6.0"]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
flow = "flow.cli:main"
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
packages = ["src/flow"]
|
||||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
BIN
tests/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
tests/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_action.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_action.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_action.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_action.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_bootstrap.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_bootstrap.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_bootstrap.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_bootstrap.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_cli.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_cli.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_cli.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_cli.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_commands.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_commands.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_commands.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_commands.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_completion.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_completion.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_completion.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_completion.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_config.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_config.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_config.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_config.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_console.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_console.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_console.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_console.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_dotfiles.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_dotfiles.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_dotfiles.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_dotfiles.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
tests/__pycache__/test_dotfiles_folding.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_dotfiles_folding.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_paths.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_paths.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_paths.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_paths.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_platform.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_platform.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_platform.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_platform.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_self_hosting.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_self_hosting.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_self_hosting.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_self_hosting.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_stow.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_stow.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_stow.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_stow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_variables.cpython-313-pytest-9.0.2.pyc
Normal file
BIN
tests/__pycache__/test_variables.cpython-313-pytest-9.0.2.pyc
Normal file
Binary file not shown.
BIN
tests/__pycache__/test_variables.cpython-313.pyc
Normal file
BIN
tests/__pycache__/test_variables.cpython-313.pyc
Normal file
Binary file not shown.
115
tests/test_action.py
Normal file
115
tests/test_action.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
"""Tests for flow.core.action."""
|
||||||
|
|
||||||
|
from flow.core.action import Action, ActionExecutor
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
|
||||||
|
|
||||||
|
def test_action_defaults():
|
||||||
|
a = Action(type="test", description="Test action")
|
||||||
|
assert a.status == "pending"
|
||||||
|
assert a.error is None
|
||||||
|
assert a.skip_on_error is True
|
||||||
|
assert a.os_filter is None
|
||||||
|
assert a.data == {}
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_register_and_execute(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
results = []
|
||||||
|
|
||||||
|
def handler(data):
|
||||||
|
results.append(data["key"])
|
||||||
|
|
||||||
|
executor.register("test-action", handler)
|
||||||
|
|
||||||
|
actions = [
|
||||||
|
Action(type="test-action", description="Do thing", data={"key": "value1"}),
|
||||||
|
Action(type="test-action", description="Do another", data={"key": "value2"}),
|
||||||
|
]
|
||||||
|
|
||||||
|
executor.execute(actions, current_os="linux")
|
||||||
|
assert results == ["value1", "value2"]
|
||||||
|
assert actions[0].status == "completed"
|
||||||
|
assert actions[1].status == "completed"
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_dry_run(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
executed = []
|
||||||
|
|
||||||
|
executor.register("test", lambda data: executed.append(1))
|
||||||
|
|
||||||
|
actions = [Action(type="test", description="Should not run")]
|
||||||
|
executor.execute(actions, dry_run=True)
|
||||||
|
assert executed == [] # Nothing executed
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "EXECUTION PLAN" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_skip_on_error(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
|
||||||
|
def failing_handler(data):
|
||||||
|
raise RuntimeError("boom")
|
||||||
|
|
||||||
|
executor.register("fail", failing_handler)
|
||||||
|
|
||||||
|
actions = [
|
||||||
|
Action(type="fail", description="Will fail", skip_on_error=True),
|
||||||
|
Action(type="fail", description="Should still run", skip_on_error=True),
|
||||||
|
]
|
||||||
|
|
||||||
|
executor.execute(actions, current_os="linux")
|
||||||
|
assert actions[0].status == "skipped"
|
||||||
|
assert actions[1].status == "skipped"
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_critical_failure_stops(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
|
||||||
|
def failing_handler(data):
|
||||||
|
raise RuntimeError("critical failure")
|
||||||
|
|
||||||
|
executor.register("fail", failing_handler)
|
||||||
|
executor.register("ok", lambda data: None)
|
||||||
|
|
||||||
|
actions = [
|
||||||
|
Action(type="fail", description="Critical", skip_on_error=False),
|
||||||
|
Action(type="ok", description="Should not run"),
|
||||||
|
]
|
||||||
|
|
||||||
|
executor.execute(actions, current_os="linux")
|
||||||
|
assert actions[0].status == "failed"
|
||||||
|
assert actions[1].status == "pending" # Never reached
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_os_filter(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
executed = []
|
||||||
|
|
||||||
|
executor.register("test", lambda data: executed.append(data.get("name")))
|
||||||
|
|
||||||
|
actions = [
|
||||||
|
Action(type="test", description="Linux only", data={"name": "linux"}, os_filter="linux"),
|
||||||
|
Action(type="test", description="macOS only", data={"name": "macos"}, os_filter="macos"),
|
||||||
|
Action(type="test", description="Any OS", data={"name": "any"}),
|
||||||
|
]
|
||||||
|
|
||||||
|
executor.execute(actions, current_os="linux")
|
||||||
|
assert "linux" in executed
|
||||||
|
assert "any" in executed
|
||||||
|
assert "macos" not in executed
|
||||||
|
|
||||||
|
|
||||||
|
def test_executor_no_handler(capsys):
|
||||||
|
console = ConsoleLogger()
|
||||||
|
executor = ActionExecutor(console)
|
||||||
|
|
||||||
|
actions = [Action(type="unknown", description="No handler registered")]
|
||||||
|
executor.execute(actions, current_os="linux")
|
||||||
|
assert actions[0].status == "skipped"
|
||||||
129
tests/test_bootstrap.py
Normal file
129
tests/test_bootstrap.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"""Tests for flow.commands.bootstrap — action planning."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from flow.commands.bootstrap import _get_profiles, _plan_actions
|
||||||
|
from flow.core.config import AppConfig, FlowContext
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
from flow.core.platform import PlatformInfo
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def ctx():
|
||||||
|
return FlowContext(
|
||||||
|
config=AppConfig(),
|
||||||
|
manifest={
|
||||||
|
"binaries": {
|
||||||
|
"neovim": {
|
||||||
|
"version": "0.10.4",
|
||||||
|
"source": "github:neovim/neovim",
|
||||||
|
"asset-pattern": "nvim-{{os}}-{{arch}}.tar.gz",
|
||||||
|
"platform-map": {"linux-arm64": {"os": "linux", "arch": "arm64"}},
|
||||||
|
"install-script": "echo install",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
platform=PlatformInfo(os="linux", arch="arm64", platform="linux-arm64"),
|
||||||
|
console=ConsoleLogger(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_empty_profile(ctx):
|
||||||
|
actions = _plan_actions(ctx, "test", {}, {})
|
||||||
|
assert actions == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_hostname(ctx):
|
||||||
|
actions = _plan_actions(ctx, "test", {"hostname": "myhost"}, {})
|
||||||
|
types = [a.type for a in actions]
|
||||||
|
assert "set-hostname" in types
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_locale_and_shell(ctx):
|
||||||
|
actions = _plan_actions(ctx, "test", {"locale": "en_US.UTF-8", "shell": "zsh"}, {})
|
||||||
|
types = [a.type for a in actions]
|
||||||
|
assert "set-locale" in types
|
||||||
|
assert "set-shell" in types
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_packages(ctx):
|
||||||
|
env_config = {
|
||||||
|
"packages": {
|
||||||
|
"standard": ["git", "zsh", "tmux"],
|
||||||
|
"binary": ["neovim"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
actions = _plan_actions(ctx, "test", env_config, {})
|
||||||
|
types = [a.type for a in actions]
|
||||||
|
assert "pm-update" in types
|
||||||
|
assert "install-packages" in types
|
||||||
|
assert "install-binary" in types
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_ssh_keygen(ctx):
|
||||||
|
env_config = {
|
||||||
|
"ssh_keygen": [
|
||||||
|
{"type": "ed25519", "comment": "test@host", "filename": "id_ed25519"},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
actions = _plan_actions(ctx, "test", env_config, {})
|
||||||
|
types = [a.type for a in actions]
|
||||||
|
assert "generate-ssh-key" in types
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_runcmd(ctx):
|
||||||
|
env_config = {"runcmd": ["echo hello", "mkdir -p ~/tmp"]}
|
||||||
|
actions = _plan_actions(ctx, "test", env_config, {})
|
||||||
|
run_cmds = [a for a in actions if a.type == "run-command"]
|
||||||
|
assert len(run_cmds) == 2
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_requires(ctx):
|
||||||
|
env_config = {"requires": ["VAR1", "VAR2"]}
|
||||||
|
actions = _plan_actions(ctx, "test", env_config, {})
|
||||||
|
checks = [a for a in actions if a.type == "check-variable"]
|
||||||
|
assert len(checks) == 2
|
||||||
|
assert all(not a.skip_on_error for a in checks)
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_full_profile(ctx):
|
||||||
|
"""Test planning with a realistic linux-vm profile."""
|
||||||
|
env_config = {
|
||||||
|
"requires": ["TARGET_HOSTNAME"],
|
||||||
|
"os": "linux",
|
||||||
|
"hostname": "$TARGET_HOSTNAME",
|
||||||
|
"shell": "zsh",
|
||||||
|
"locale": "en_US.UTF-8",
|
||||||
|
"packages": {
|
||||||
|
"standard": ["zsh", "tmux", "git"],
|
||||||
|
"binary": ["neovim"],
|
||||||
|
},
|
||||||
|
"ssh_keygen": [{"type": "ed25519", "comment": "test"}],
|
||||||
|
"configs": ["bin"],
|
||||||
|
"runcmd": ["mkdir -p ~/projects"],
|
||||||
|
}
|
||||||
|
actions = _plan_actions(ctx, "linux-vm", env_config, {"TARGET_HOSTNAME": "myvm"})
|
||||||
|
assert len(actions) >= 8
|
||||||
|
|
||||||
|
types = [a.type for a in actions]
|
||||||
|
assert "check-variable" in types
|
||||||
|
assert "set-hostname" in types
|
||||||
|
assert "set-locale" in types
|
||||||
|
assert "set-shell" in types
|
||||||
|
assert "pm-update" in types
|
||||||
|
assert "install-packages" in types
|
||||||
|
assert "install-binary" in types
|
||||||
|
assert "generate-ssh-key" in types
|
||||||
|
assert "link-config" in types
|
||||||
|
assert "run-command" in types
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_profiles_from_manifest(ctx):
|
||||||
|
ctx.manifest = {"profiles": {"linux": {"os": "linux"}}}
|
||||||
|
assert "linux" in _get_profiles(ctx)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_profiles_rejects_environments(ctx):
|
||||||
|
ctx.manifest = {"environments": {"legacy": {"os": "linux"}}}
|
||||||
|
with pytest.raises(RuntimeError, match="no longer supported"):
|
||||||
|
_get_profiles(ctx)
|
||||||
153
tests/test_cli.py
Normal file
153
tests/test_cli.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
"""Tests for CLI routing and command registration."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def _clean_env():
|
||||||
|
"""Return env dict without DF_* variables that trigger enter's guard."""
|
||||||
|
return {k: v for k, v in os.environ.items() if not k.startswith("DF_")}
|
||||||
|
|
||||||
|
|
||||||
|
def test_version():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "--version"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "0.1.0" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "enter" in result.stdout
|
||||||
|
assert "dev" in result.stdout
|
||||||
|
assert "dotfiles" in result.stdout
|
||||||
|
assert "bootstrap" in result.stdout
|
||||||
|
assert "package" in result.stdout
|
||||||
|
assert "sync" in result.stdout
|
||||||
|
assert "completion" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_enter_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "enter", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "target" in result.stdout
|
||||||
|
assert "--dry-run" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_dotfiles_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "dotfiles", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "init" in result.stdout
|
||||||
|
assert "link" in result.stdout
|
||||||
|
assert "unlink" in result.stdout
|
||||||
|
assert "status" in result.stdout
|
||||||
|
assert "sync" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_bootstrap_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "bootstrap", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "run" in result.stdout
|
||||||
|
assert "list" in result.stdout
|
||||||
|
assert "show" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_package_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "package", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "install" in result.stdout
|
||||||
|
assert "list" in result.stdout
|
||||||
|
assert "remove" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_sync_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "sync", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "check" in result.stdout
|
||||||
|
assert "fetch" in result.stdout
|
||||||
|
assert "summary" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_dev_help():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "dev", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "create" in result.stdout
|
||||||
|
assert "exec" in result.stdout
|
||||||
|
assert "connect" in result.stdout
|
||||||
|
assert "list" in result.stdout
|
||||||
|
assert "stop" in result.stdout
|
||||||
|
assert "remove" in result.stdout
|
||||||
|
assert "respawn" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_enter_dry_run():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "enter", "--dry-run", "personal@orb"],
|
||||||
|
capture_output=True, text=True, env=_clean_env(),
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "ssh" in result.stdout
|
||||||
|
assert "personal.orb" in result.stdout
|
||||||
|
assert "tmux" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_enter_dry_run_no_tmux():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "enter", "--dry-run", "--no-tmux", "personal@orb"],
|
||||||
|
capture_output=True, text=True, env=_clean_env(),
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "ssh" in result.stdout
|
||||||
|
assert "tmux" not in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_enter_dry_run_with_user():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "enter", "--dry-run", "root@personal@orb"],
|
||||||
|
capture_output=True, text=True, env=_clean_env(),
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
|
assert "root@personal.orb" in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
def test_aliases():
|
||||||
|
"""Test that command aliases work."""
|
||||||
|
for alias, cmd in [("dot", "dotfiles"), ("pkg", "package"), ("setup", "bootstrap")]:
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", alias, "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0, f"Alias '{alias}' failed"
|
||||||
|
|
||||||
|
|
||||||
|
def test_dev_remove_alias():
|
||||||
|
result = subprocess.run(
|
||||||
|
[sys.executable, "-m", "flow", "dev", "rm", "--help"],
|
||||||
|
capture_output=True, text=True,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0
|
||||||
59
tests/test_commands.py
Normal file
59
tests/test_commands.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
"""Tests for command modules — registration and target parsing."""
|
||||||
|
|
||||||
|
from flow.commands.enter import _parse_target
|
||||||
|
from flow.commands.container import _cname, _parse_image_ref
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseTarget:
|
||||||
|
def test_full_target(self):
|
||||||
|
user, ns, plat = _parse_target("root@personal@orb")
|
||||||
|
assert user == "root"
|
||||||
|
assert ns == "personal"
|
||||||
|
assert plat == "orb"
|
||||||
|
|
||||||
|
def test_no_user(self):
|
||||||
|
user, ns, plat = _parse_target("personal@orb")
|
||||||
|
assert user is None
|
||||||
|
assert ns == "personal"
|
||||||
|
assert plat == "orb"
|
||||||
|
|
||||||
|
def test_namespace_only(self):
|
||||||
|
user, ns, plat = _parse_target("personal")
|
||||||
|
assert user is None
|
||||||
|
assert ns == "personal"
|
||||||
|
assert plat is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestCname:
|
||||||
|
def test_adds_prefix(self):
|
||||||
|
assert _cname("api") == "dev-api"
|
||||||
|
|
||||||
|
def test_no_double_prefix(self):
|
||||||
|
assert _cname("dev-api") == "dev-api"
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseImageRef:
|
||||||
|
def test_simple_image(self):
|
||||||
|
ref, repo, tag, label = _parse_image_ref("node")
|
||||||
|
assert ref == "registry.tomastm.com/node:latest"
|
||||||
|
assert tag == "latest"
|
||||||
|
|
||||||
|
def test_tm0_shorthand(self):
|
||||||
|
ref, repo, tag, label = _parse_image_ref("tm0/node")
|
||||||
|
assert "registry.tomastm.com" in ref
|
||||||
|
assert "node" in ref
|
||||||
|
|
||||||
|
def test_docker_shorthand(self):
|
||||||
|
ref, repo, tag, label = _parse_image_ref("docker/python")
|
||||||
|
assert "docker.io" in ref
|
||||||
|
assert "python" in ref
|
||||||
|
|
||||||
|
def test_with_tag(self):
|
||||||
|
ref, repo, tag, label = _parse_image_ref("node:20")
|
||||||
|
assert tag == "20"
|
||||||
|
assert ":20" in ref
|
||||||
|
|
||||||
|
def test_full_registry(self):
|
||||||
|
ref, repo, tag, label = _parse_image_ref("ghcr.io/user/image:v1")
|
||||||
|
assert ref == "ghcr.io/user/image:v1"
|
||||||
|
assert tag == "v1"
|
||||||
63
tests/test_completion.py
Normal file
63
tests/test_completion.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
"""Tests for flow.commands.completion dynamic suggestions."""
|
||||||
|
|
||||||
|
from flow.commands import completion
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_top_level_prefix():
|
||||||
|
out = completion.complete(["flow", "do"], 2)
|
||||||
|
assert "dotfiles" in out
|
||||||
|
assert "dot" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_bootstrap_profiles(monkeypatch):
|
||||||
|
monkeypatch.setattr(completion, "_list_bootstrap_profiles", lambda: ["linux-vm", "macos-host"])
|
||||||
|
out = completion.complete(["flow", "bootstrap", "show", "li"], 4)
|
||||||
|
assert out == ["linux-vm"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_package_install(monkeypatch):
|
||||||
|
monkeypatch.setattr(completion, "_list_manifest_packages", lambda: ["neovim", "fzf"])
|
||||||
|
out = completion.complete(["flow", "package", "install", "n"], 4)
|
||||||
|
assert out == ["neovim"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_package_remove(monkeypatch):
|
||||||
|
monkeypatch.setattr(completion, "_list_installed_packages", lambda: ["hello", "jq"])
|
||||||
|
out = completion.complete(["flow", "package", "remove", "h"], 4)
|
||||||
|
assert out == ["hello"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_dotfiles_profile_value(monkeypatch):
|
||||||
|
monkeypatch.setattr(completion, "_list_dotfiles_profiles", lambda: ["work", "personal"])
|
||||||
|
out = completion.complete(["flow", "dotfiles", "link", "--profile", "w"], 5)
|
||||||
|
assert out == ["work"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_enter_targets(monkeypatch):
|
||||||
|
monkeypatch.setattr(completion, "_list_targets", lambda: ["personal@orb", "work@ec2"])
|
||||||
|
out = completion.complete(["flow", "enter", "p"], 3)
|
||||||
|
assert out == ["personal@orb"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_dev_subcommands():
|
||||||
|
out = completion.complete(["flow", "dev", "c"], 3)
|
||||||
|
assert out == ["connect", "create"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_complete_completion_subcommands():
|
||||||
|
out = completion.complete(["flow", "completion", "i"], 3)
|
||||||
|
assert out == ["install-zsh"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_rc_snippet_is_idempotent(tmp_path):
|
||||||
|
rc_path = tmp_path / ".zshrc"
|
||||||
|
completion_dir = tmp_path / "completions"
|
||||||
|
|
||||||
|
first = completion._ensure_rc_snippet(rc_path, completion_dir)
|
||||||
|
second = completion._ensure_rc_snippet(rc_path, completion_dir)
|
||||||
|
|
||||||
|
assert first is True
|
||||||
|
assert second is False
|
||||||
|
text = rc_path.read_text()
|
||||||
|
assert text.count(completion.ZSH_RC_START) == 1
|
||||||
|
assert text.count(completion.ZSH_RC_END) == 1
|
||||||
70
tests/test_config.py
Normal file
70
tests/test_config.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
"""Tests for flow.core.config."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from flow.core.config import AppConfig, FlowContext, load_config, load_manifest
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_missing_file(tmp_path):
|
||||||
|
cfg = load_config(tmp_path / "nonexistent")
|
||||||
|
assert isinstance(cfg, AppConfig)
|
||||||
|
assert cfg.dotfiles_url == ""
|
||||||
|
assert cfg.container_registry == "registry.tomastm.com"
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_ini(tmp_path):
|
||||||
|
config_file = tmp_path / "config"
|
||||||
|
config_file.write_text("""
|
||||||
|
[repository]
|
||||||
|
dotfiles_url=git@github.com:user/dots.git
|
||||||
|
dotfiles_branch=dev
|
||||||
|
|
||||||
|
[paths]
|
||||||
|
projects_dir=~/code
|
||||||
|
|
||||||
|
[defaults]
|
||||||
|
container_registry=my.registry.com
|
||||||
|
container_tag=v1
|
||||||
|
tmux_session=main
|
||||||
|
|
||||||
|
[targets]
|
||||||
|
personal=orb personal@orb
|
||||||
|
work=ec2 work.ec2.internal ~/.ssh/id_work
|
||||||
|
""")
|
||||||
|
cfg = load_config(config_file)
|
||||||
|
assert cfg.dotfiles_url == "git@github.com:user/dots.git"
|
||||||
|
assert cfg.dotfiles_branch == "dev"
|
||||||
|
assert cfg.projects_dir == "~/code"
|
||||||
|
assert cfg.container_registry == "my.registry.com"
|
||||||
|
assert cfg.container_tag == "v1"
|
||||||
|
assert cfg.tmux_session == "main"
|
||||||
|
assert len(cfg.targets) == 2
|
||||||
|
assert cfg.targets[0].namespace == "personal"
|
||||||
|
assert cfg.targets[0].platform == "orb"
|
||||||
|
assert cfg.targets[0].ssh_host == "personal@orb"
|
||||||
|
assert cfg.targets[1].ssh_identity == "~/.ssh/id_work"
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_missing_file(tmp_path):
|
||||||
|
result = load_manifest(tmp_path / "nonexistent.yaml")
|
||||||
|
assert result == {}
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_valid(tmp_path):
|
||||||
|
manifest = tmp_path / "manifest.yaml"
|
||||||
|
manifest.write_text("""
|
||||||
|
profiles:
|
||||||
|
linux-vm:
|
||||||
|
os: linux
|
||||||
|
hostname: test
|
||||||
|
""")
|
||||||
|
result = load_manifest(manifest)
|
||||||
|
assert "profiles" in result
|
||||||
|
assert result["profiles"]["linux-vm"]["os"] == "linux"
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_non_dict(tmp_path):
|
||||||
|
manifest = tmp_path / "manifest.yaml"
|
||||||
|
manifest.write_text("- a\n- b\n")
|
||||||
|
result = load_manifest(manifest)
|
||||||
|
assert result == {}
|
||||||
95
tests/test_console.py
Normal file
95
tests/test_console.py
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
"""Tests for flow.core.console."""
|
||||||
|
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_info(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.info("hello")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "[INFO]" in out
|
||||||
|
assert "hello" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_warn(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.warn("caution")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "[WARN]" in out
|
||||||
|
assert "caution" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_error(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.error("bad thing")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "[ERROR]" in out
|
||||||
|
assert "bad thing" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_success(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.success("done")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "[SUCCESS]" in out
|
||||||
|
assert "done" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_step_lifecycle(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.step_start(1, 3, "Test step")
|
||||||
|
c.step_command("echo hi")
|
||||||
|
c.step_output("hi")
|
||||||
|
c.step_complete("Done")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "Step 1/3" in out
|
||||||
|
assert "$ echo hi" in out
|
||||||
|
assert "Done" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_step_skip(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.start_time = 0
|
||||||
|
c.step_skip("not needed")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "Skipped" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_step_fail(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.start_time = 0
|
||||||
|
c.step_fail("exploded")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "Failed" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_table(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.table(["NAME", "VALUE"], [["foo", "bar"], ["baz", "qux"]])
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "NAME" in out
|
||||||
|
assert "foo" in out
|
||||||
|
assert "baz" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_table_empty(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.table(["NAME"], [])
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert out == ""
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_section_header(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.section_header("Test", "sub")
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "TEST" in out
|
||||||
|
assert "sub" in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_console_plan_header(capsys):
|
||||||
|
c = ConsoleLogger()
|
||||||
|
c.plan_header("My Plan", 5)
|
||||||
|
out = capsys.readouterr().out
|
||||||
|
assert "MY PLAN" in out
|
||||||
|
assert "5 actions" in out
|
||||||
67
tests/test_dotfiles.py
Normal file
67
tests/test_dotfiles.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
"""Tests for flow.commands.dotfiles — link/unlink/status logic."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from flow.commands.dotfiles import _discover_packages, _walk_package
|
||||||
|
from flow.core.config import AppConfig, FlowContext
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
from flow.core.platform import PlatformInfo
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def dotfiles_tree(tmp_path):
|
||||||
|
"""Create a sample dotfiles directory structure."""
|
||||||
|
common = tmp_path / "common"
|
||||||
|
(common / "zsh").mkdir(parents=True)
|
||||||
|
(common / "zsh" / ".zshrc").write_text("# zshrc")
|
||||||
|
(common / "zsh" / ".zshenv").write_text("# zshenv")
|
||||||
|
(common / "tmux").mkdir(parents=True)
|
||||||
|
(common / "tmux" / ".tmux.conf").write_text("# tmux")
|
||||||
|
|
||||||
|
profiles = tmp_path / "profiles" / "work"
|
||||||
|
(profiles / "git").mkdir(parents=True)
|
||||||
|
(profiles / "git" / ".gitconfig").write_text("[user]\nname = Work")
|
||||||
|
|
||||||
|
return tmp_path
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_packages_common(dotfiles_tree):
|
||||||
|
packages = _discover_packages(dotfiles_tree)
|
||||||
|
assert "zsh" in packages
|
||||||
|
assert "tmux" in packages
|
||||||
|
assert "git" not in packages # git is only in profiles
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_packages_with_profile(dotfiles_tree):
|
||||||
|
packages = _discover_packages(dotfiles_tree, profile="work")
|
||||||
|
assert "zsh" in packages
|
||||||
|
assert "tmux" in packages
|
||||||
|
assert "git" in packages
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_packages_profile_overrides(dotfiles_tree):
|
||||||
|
# Add zsh to work profile
|
||||||
|
work_zsh = dotfiles_tree / "profiles" / "work" / "zsh"
|
||||||
|
work_zsh.mkdir(parents=True)
|
||||||
|
(work_zsh / ".zshrc").write_text("# work zshrc")
|
||||||
|
|
||||||
|
packages = _discover_packages(dotfiles_tree, profile="work")
|
||||||
|
# Profile should override common
|
||||||
|
assert packages["zsh"] == work_zsh
|
||||||
|
|
||||||
|
|
||||||
|
def test_walk_package(dotfiles_tree):
|
||||||
|
home = Path("/tmp/fakehome")
|
||||||
|
source = dotfiles_tree / "common" / "zsh"
|
||||||
|
pairs = list(_walk_package(source, home))
|
||||||
|
assert len(pairs) == 2
|
||||||
|
sources = {str(s.name) for s, _ in pairs}
|
||||||
|
assert ".zshrc" in sources
|
||||||
|
assert ".zshenv" in sources
|
||||||
|
targets = {str(t) for _, t in pairs}
|
||||||
|
assert str(home / ".zshrc") in targets
|
||||||
|
assert str(home / ".zshenv") in targets
|
||||||
300
tests/test_dotfiles_folding.py
Normal file
300
tests/test_dotfiles_folding.py
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
"""Integration tests for dotfiles tree folding behavior."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from flow.commands.dotfiles import _discover_packages, _walk_package, run_link, run_status
|
||||||
|
from flow.core.config import AppConfig, FlowContext
|
||||||
|
from flow.core.console import ConsoleLogger
|
||||||
|
from flow.core.paths import LINKED_STATE
|
||||||
|
from flow.core.platform import PlatformInfo
|
||||||
|
from flow.core.stow import LinkTree, TreeFolder
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def ctx():
|
||||||
|
"""Create a mock FlowContext."""
|
||||||
|
return FlowContext(
|
||||||
|
config=AppConfig(),
|
||||||
|
manifest={},
|
||||||
|
platform=PlatformInfo(),
|
||||||
|
console=ConsoleLogger(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def dotfiles_with_nested(tmp_path):
|
||||||
|
"""Create dotfiles with nested directory structure for folding tests."""
|
||||||
|
common = tmp_path / "common"
|
||||||
|
|
||||||
|
# nvim package with nested config
|
||||||
|
nvim = common / "nvim" / ".config" / "nvim"
|
||||||
|
nvim.mkdir(parents=True)
|
||||||
|
(nvim / "init.lua").write_text("-- init")
|
||||||
|
(nvim / "lua").mkdir()
|
||||||
|
(nvim / "lua" / "config.lua").write_text("-- config")
|
||||||
|
(nvim / "lua" / "plugins.lua").write_text("-- plugins")
|
||||||
|
|
||||||
|
# zsh package with flat structure
|
||||||
|
zsh = common / "zsh"
|
||||||
|
zsh.mkdir(parents=True)
|
||||||
|
(zsh / ".zshrc").write_text("# zshrc")
|
||||||
|
(zsh / ".zshenv").write_text("# zshenv")
|
||||||
|
|
||||||
|
return tmp_path
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def home_dir(tmp_path):
|
||||||
|
"""Create a temporary home directory."""
|
||||||
|
home = tmp_path / "home"
|
||||||
|
home.mkdir()
|
||||||
|
return home
|
||||||
|
|
||||||
|
|
||||||
|
def test_tree_folding_single_package(dotfiles_with_nested, home_dir):
|
||||||
|
"""Test that a single package can be folded into directory symlink."""
|
||||||
|
# Discover nvim package
|
||||||
|
packages = _discover_packages(dotfiles_with_nested)
|
||||||
|
nvim_source = packages["nvim"]
|
||||||
|
|
||||||
|
# Build link tree
|
||||||
|
tree = LinkTree()
|
||||||
|
folder = TreeFolder(tree)
|
||||||
|
|
||||||
|
# Plan links for all nvim files
|
||||||
|
operations = []
|
||||||
|
for src, dst in _walk_package(nvim_source, home_dir):
|
||||||
|
ops = folder.plan_link(src, dst, "nvim")
|
||||||
|
operations.extend(ops)
|
||||||
|
|
||||||
|
# Execute operations
|
||||||
|
folder.execute_operations(operations, dry_run=False)
|
||||||
|
|
||||||
|
# Check that we created efficient symlinks
|
||||||
|
# In ideal case, we'd have one directory symlink instead of 3 file symlinks
|
||||||
|
nvim_config = home_dir / ".config" / "nvim"
|
||||||
|
|
||||||
|
# Verify links work
|
||||||
|
assert (nvim_config / "init.lua").exists()
|
||||||
|
assert (nvim_config / "lua" / "config.lua").exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_tree_unfolding_conflict(dotfiles_with_nested, home_dir):
|
||||||
|
"""Test that tree unfolds when second package needs same directory."""
|
||||||
|
common = dotfiles_with_nested / "common"
|
||||||
|
|
||||||
|
# Create second package that shares .config
|
||||||
|
tmux = common / "tmux" / ".config" / "tmux"
|
||||||
|
tmux.mkdir(parents=True)
|
||||||
|
(tmux / "tmux.conf").write_text("# tmux")
|
||||||
|
|
||||||
|
# First, link nvim (can fold .config/nvim)
|
||||||
|
tree = LinkTree()
|
||||||
|
folder = TreeFolder(tree)
|
||||||
|
|
||||||
|
nvim_source = common / "nvim"
|
||||||
|
for src, dst in _walk_package(nvim_source, home_dir):
|
||||||
|
ops = folder.plan_link(src, dst, "nvim")
|
||||||
|
folder.execute_operations(ops, dry_run=False)
|
||||||
|
|
||||||
|
# Now link tmux (should unfold if needed)
|
||||||
|
tmux_source = common / "tmux"
|
||||||
|
for src, dst in _walk_package(tmux_source, home_dir):
|
||||||
|
ops = folder.plan_link(src, dst, "tmux")
|
||||||
|
folder.execute_operations(ops, dry_run=False)
|
||||||
|
|
||||||
|
# Both packages should be linked
|
||||||
|
assert (home_dir / ".config" / "nvim" / "init.lua").exists()
|
||||||
|
assert (home_dir / ".config" / "tmux" / "tmux.conf").exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_state_format_with_directory_links(dotfiles_with_nested, home_dir):
|
||||||
|
"""Test that state file correctly tracks directory vs file links."""
|
||||||
|
tree = LinkTree()
|
||||||
|
|
||||||
|
# Add a directory link
|
||||||
|
tree.add_link(
|
||||||
|
home_dir / ".config" / "nvim",
|
||||||
|
dotfiles_with_nested / "common" / "nvim" / ".config" / "nvim",
|
||||||
|
"nvim",
|
||||||
|
is_dir_link=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add a file link
|
||||||
|
tree.add_link(
|
||||||
|
home_dir / ".zshrc",
|
||||||
|
dotfiles_with_nested / "common" / "zsh" / ".zshrc",
|
||||||
|
"zsh",
|
||||||
|
is_dir_link=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert to state
|
||||||
|
state = tree.to_state()
|
||||||
|
|
||||||
|
# Verify format
|
||||||
|
assert state["version"] == 2
|
||||||
|
nvim_link = state["links"]["nvim"][str(home_dir / ".config" / "nvim")]
|
||||||
|
assert nvim_link["is_directory_link"] is True
|
||||||
|
|
||||||
|
zsh_link = state["links"]["zsh"][str(home_dir / ".zshrc")]
|
||||||
|
assert zsh_link["is_directory_link"] is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_state_backward_compatibility_rejected(home_dir):
|
||||||
|
"""Old state format should be rejected (no backward compatibility)."""
|
||||||
|
old_state = {
|
||||||
|
"links": {
|
||||||
|
"zsh": {
|
||||||
|
str(home_dir / ".zshrc"): str(home_dir.parent / "dotfiles" / "zsh" / ".zshrc"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
with pytest.raises(RuntimeError, match="Unsupported linked state format"):
|
||||||
|
LinkTree.from_state(old_state)
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_packages_with_flow_package(tmp_path):
|
||||||
|
"""Test discovering the flow package itself from dotfiles."""
|
||||||
|
common = tmp_path / "common"
|
||||||
|
|
||||||
|
# Create flow package
|
||||||
|
flow_pkg = common / "flow" / ".config" / "flow"
|
||||||
|
flow_pkg.mkdir(parents=True)
|
||||||
|
(flow_pkg / "manifest.yaml").write_text("profiles: {}")
|
||||||
|
(flow_pkg / "config").write_text("[repository]\n")
|
||||||
|
|
||||||
|
packages = _discover_packages(tmp_path)
|
||||||
|
|
||||||
|
# Flow package should be discovered like any other
|
||||||
|
assert "flow" in packages
|
||||||
|
assert packages["flow"] == common / "flow"
|
||||||
|
|
||||||
|
|
||||||
|
def test_walk_flow_package(tmp_path):
|
||||||
|
"""Test walking the flow package structure."""
|
||||||
|
flow_pkg = tmp_path / "flow"
|
||||||
|
flow_config = flow_pkg / ".config" / "flow"
|
||||||
|
flow_config.mkdir(parents=True)
|
||||||
|
(flow_config / "manifest.yaml").write_text("profiles: {}")
|
||||||
|
(flow_config / "config").write_text("[repository]\n")
|
||||||
|
|
||||||
|
home = Path("/tmp/fakehome")
|
||||||
|
pairs = list(_walk_package(flow_pkg, home))
|
||||||
|
|
||||||
|
# Should find both files
|
||||||
|
assert len(pairs) == 2
|
||||||
|
targets = [str(t) for _, t in pairs]
|
||||||
|
assert str(home / ".config" / "flow" / "manifest.yaml") in targets
|
||||||
|
assert str(home / ".config" / "flow" / "config") in targets
|
||||||
|
|
||||||
|
|
||||||
|
def test_conflict_detection_before_execution(dotfiles_with_nested, home_dir):
|
||||||
|
"""Test that conflicts are detected before any changes are made."""
|
||||||
|
# Create existing file that conflicts
|
||||||
|
existing = home_dir / ".zshrc"
|
||||||
|
existing.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
existing.write_text("# existing zshrc")
|
||||||
|
|
||||||
|
# Try to link package that wants .zshrc
|
||||||
|
tree = LinkTree()
|
||||||
|
folder = TreeFolder(tree)
|
||||||
|
|
||||||
|
zsh_source = dotfiles_with_nested / "common" / "zsh"
|
||||||
|
operations = []
|
||||||
|
for src, dst in _walk_package(zsh_source, home_dir):
|
||||||
|
ops = folder.plan_link(src, dst, "zsh")
|
||||||
|
operations.extend(ops)
|
||||||
|
|
||||||
|
# Should detect conflict
|
||||||
|
conflicts = folder.detect_conflicts(operations)
|
||||||
|
assert len(conflicts) > 0
|
||||||
|
assert any("already exists" in c for c in conflicts)
|
||||||
|
|
||||||
|
# Original file should be unchanged
|
||||||
|
assert existing.read_text() == "# existing zshrc"
|
||||||
|
|
||||||
|
|
||||||
|
def test_profile_switching_relink(tmp_path):
|
||||||
|
"""Test switching between profiles maintains correct links."""
|
||||||
|
# Create profiles
|
||||||
|
common = tmp_path / "common"
|
||||||
|
profiles = tmp_path / "profiles"
|
||||||
|
|
||||||
|
# Common zsh
|
||||||
|
(common / "zsh").mkdir(parents=True)
|
||||||
|
(common / "zsh" / ".zshrc").write_text("# common zsh")
|
||||||
|
|
||||||
|
# Work profile override
|
||||||
|
(profiles / "work" / "zsh").mkdir(parents=True)
|
||||||
|
(profiles / "work" / "zsh" / ".zshrc").write_text("# work zsh")
|
||||||
|
|
||||||
|
# Personal profile override
|
||||||
|
(profiles / "personal" / "zsh").mkdir(parents=True)
|
||||||
|
(profiles / "personal" / "zsh" / ".zshrc").write_text("# personal zsh")
|
||||||
|
|
||||||
|
# Test that profile discovery works correctly
|
||||||
|
work_packages = _discover_packages(tmp_path, profile="work")
|
||||||
|
personal_packages = _discover_packages(tmp_path, profile="personal")
|
||||||
|
|
||||||
|
# Both should find zsh, but from different sources
|
||||||
|
assert "zsh" in work_packages
|
||||||
|
assert "zsh" in personal_packages
|
||||||
|
assert work_packages["zsh"] != personal_packages["zsh"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_can_fold_empty_directory():
|
||||||
|
"""Test can_fold with empty directory."""
|
||||||
|
tree = LinkTree()
|
||||||
|
target_dir = Path("/home/user/.config/nvim")
|
||||||
|
|
||||||
|
# Empty directory - should be able to fold
|
||||||
|
assert tree.can_fold(target_dir, "nvim")
|
||||||
|
|
||||||
|
|
||||||
|
def test_can_fold_with_subdirectories():
|
||||||
|
"""Test can_fold with nested directory structure."""
|
||||||
|
tree = LinkTree()
|
||||||
|
base = Path("/home/user/.config/nvim")
|
||||||
|
|
||||||
|
# Add nested files from same package
|
||||||
|
tree.add_link(base / "init.lua", Path("/dotfiles/nvim/init.lua"), "nvim")
|
||||||
|
tree.add_link(base / "lua" / "config.lua", Path("/dotfiles/nvim/lua/config.lua"), "nvim")
|
||||||
|
tree.add_link(base / "lua" / "plugins" / "init.lua", Path("/dotfiles/nvim/lua/plugins/init.lua"), "nvim")
|
||||||
|
|
||||||
|
# Should be able to fold at base level
|
||||||
|
assert tree.can_fold(base, "nvim")
|
||||||
|
|
||||||
|
# Add file from different package
|
||||||
|
tree.add_link(base / "other.lua", Path("/dotfiles/other/other.lua"), "other")
|
||||||
|
|
||||||
|
# Now cannot fold
|
||||||
|
assert not tree.can_fold(base, "nvim")
|
||||||
|
|
||||||
|
|
||||||
|
def test_execute_operations_creates_parent_dirs(tmp_path):
|
||||||
|
"""Test that execute_operations creates necessary parent directories."""
|
||||||
|
tree = LinkTree()
|
||||||
|
folder = TreeFolder(tree)
|
||||||
|
|
||||||
|
source = tmp_path / "dotfiles" / "nvim" / ".config" / "nvim" / "init.lua"
|
||||||
|
target = tmp_path / "home" / ".config" / "nvim" / "init.lua"
|
||||||
|
|
||||||
|
# Create source
|
||||||
|
source.parent.mkdir(parents=True)
|
||||||
|
source.write_text("-- init")
|
||||||
|
|
||||||
|
# Target parent doesn't exist yet
|
||||||
|
assert not target.parent.exists()
|
||||||
|
|
||||||
|
# Plan and execute
|
||||||
|
ops = folder.plan_link(source, target, "nvim")
|
||||||
|
folder.execute_operations(ops, dry_run=False)
|
||||||
|
|
||||||
|
# Parent should be created
|
||||||
|
assert target.parent.exists()
|
||||||
|
assert target.is_symlink()
|
||||||
70
tests/test_paths.py
Normal file
70
tests/test_paths.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
"""Tests for flow.core.paths."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from flow.core.paths import (
|
||||||
|
CONFIG_DIR,
|
||||||
|
CONFIG_FILE,
|
||||||
|
DATA_DIR,
|
||||||
|
DOTFILES_DIR,
|
||||||
|
INSTALLED_STATE,
|
||||||
|
LINKED_STATE,
|
||||||
|
MANIFEST_FILE,
|
||||||
|
PACKAGES_DIR,
|
||||||
|
SCRATCH_DIR,
|
||||||
|
STATE_DIR,
|
||||||
|
ensure_dirs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_dir_under_home():
|
||||||
|
assert ".config/devflow" in str(CONFIG_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
def test_data_dir_under_home():
|
||||||
|
assert ".local/share/devflow" in str(DATA_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
def test_state_dir_under_home():
|
||||||
|
assert ".local/state/devflow" in str(STATE_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
def test_manifest_file_in_config_dir():
|
||||||
|
assert MANIFEST_FILE == CONFIG_DIR / "manifest.yaml"
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_file_in_config_dir():
|
||||||
|
assert CONFIG_FILE == CONFIG_DIR / "config"
|
||||||
|
|
||||||
|
|
||||||
|
def test_dotfiles_dir():
|
||||||
|
assert DOTFILES_DIR == DATA_DIR / "dotfiles"
|
||||||
|
|
||||||
|
|
||||||
|
def test_packages_dir():
|
||||||
|
assert PACKAGES_DIR == DATA_DIR / "packages"
|
||||||
|
|
||||||
|
|
||||||
|
def test_scratch_dir():
|
||||||
|
assert SCRATCH_DIR == DATA_DIR / "scratch"
|
||||||
|
|
||||||
|
|
||||||
|
def test_state_files():
|
||||||
|
assert LINKED_STATE == STATE_DIR / "linked.json"
|
||||||
|
assert INSTALLED_STATE == STATE_DIR / "installed.json"
|
||||||
|
|
||||||
|
|
||||||
|
def test_ensure_dirs(tmp_path, monkeypatch):
|
||||||
|
monkeypatch.setattr("flow.core.paths.CONFIG_DIR", tmp_path / "config")
|
||||||
|
monkeypatch.setattr("flow.core.paths.DATA_DIR", tmp_path / "data")
|
||||||
|
monkeypatch.setattr("flow.core.paths.STATE_DIR", tmp_path / "state")
|
||||||
|
monkeypatch.setattr("flow.core.paths.PACKAGES_DIR", tmp_path / "data" / "packages")
|
||||||
|
monkeypatch.setattr("flow.core.paths.SCRATCH_DIR", tmp_path / "data" / "scratch")
|
||||||
|
|
||||||
|
ensure_dirs()
|
||||||
|
|
||||||
|
assert (tmp_path / "config").is_dir()
|
||||||
|
assert (tmp_path / "data").is_dir()
|
||||||
|
assert (tmp_path / "state").is_dir()
|
||||||
|
assert (tmp_path / "data" / "packages").is_dir()
|
||||||
|
assert (tmp_path / "data" / "scratch").is_dir()
|
||||||
32
tests/test_platform.py
Normal file
32
tests/test_platform.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
"""Tests for flow.core.platform."""
|
||||||
|
|
||||||
|
import platform as _platform
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from flow.core.platform import PlatformInfo, detect_container_runtime, detect_platform
|
||||||
|
|
||||||
|
|
||||||
|
def test_detect_platform_returns_platforminfo():
|
||||||
|
info = detect_platform()
|
||||||
|
assert isinstance(info, PlatformInfo)
|
||||||
|
assert info.os in ("linux", "macos")
|
||||||
|
assert info.arch in ("amd64", "arm64")
|
||||||
|
assert info.platform == f"{info.os}-{info.arch}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_detect_platform_unsupported_os(monkeypatch):
|
||||||
|
monkeypatch.setattr(_platform, "system", lambda: "FreeBSD")
|
||||||
|
with pytest.raises(RuntimeError, match="Unsupported operating system"):
|
||||||
|
detect_platform()
|
||||||
|
|
||||||
|
|
||||||
|
def test_detect_platform_unsupported_arch(monkeypatch):
|
||||||
|
monkeypatch.setattr(_platform, "machine", lambda: "mips")
|
||||||
|
with pytest.raises(RuntimeError, match="Unsupported architecture"):
|
||||||
|
detect_platform()
|
||||||
|
|
||||||
|
|
||||||
|
def test_detect_container_runtime_returns_string_or_none():
|
||||||
|
result = detect_container_runtime()
|
||||||
|
assert result is None or result in ("docker", "podman")
|
||||||
215
tests/test_self_hosting.py
Normal file
215
tests/test_self_hosting.py
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
"""Tests for self-hosting flow config from dotfiles repository."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from flow.core import paths as paths_module
|
||||||
|
from flow.core.config import load_config, load_manifest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_paths(tmp_path, monkeypatch):
|
||||||
|
"""Mock path constants for testing."""
|
||||||
|
config_dir = tmp_path / "config"
|
||||||
|
dotfiles_dir = tmp_path / "dotfiles"
|
||||||
|
|
||||||
|
config_dir.mkdir()
|
||||||
|
dotfiles_dir.mkdir()
|
||||||
|
|
||||||
|
test_paths = {
|
||||||
|
"config_dir": config_dir,
|
||||||
|
"dotfiles_dir": dotfiles_dir,
|
||||||
|
"local_config": config_dir / "config",
|
||||||
|
"local_manifest": config_dir / "manifest.yaml",
|
||||||
|
"dotfiles_config": dotfiles_dir / "flow" / ".config" / "flow" / "config",
|
||||||
|
"dotfiles_manifest": dotfiles_dir / "flow" / ".config" / "flow" / "manifest.yaml",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Patch at the paths module level
|
||||||
|
monkeypatch.setattr(paths_module, "CONFIG_FILE", test_paths["local_config"])
|
||||||
|
monkeypatch.setattr(paths_module, "MANIFEST_FILE", test_paths["local_manifest"])
|
||||||
|
monkeypatch.setattr(paths_module, "DOTFILES_CONFIG", test_paths["dotfiles_config"])
|
||||||
|
monkeypatch.setattr(paths_module, "DOTFILES_MANIFEST", test_paths["dotfiles_manifest"])
|
||||||
|
|
||||||
|
return test_paths
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_priority_dotfiles_first(mock_paths):
|
||||||
|
"""Test that dotfiles manifest takes priority over local."""
|
||||||
|
# Create both manifests
|
||||||
|
local_manifest = mock_paths["local_manifest"]
|
||||||
|
dotfiles_manifest = mock_paths["dotfiles_manifest"]
|
||||||
|
|
||||||
|
local_manifest.write_text("profiles:\n local:\n os: linux")
|
||||||
|
|
||||||
|
dotfiles_manifest.parent.mkdir(parents=True)
|
||||||
|
dotfiles_manifest.write_text("profiles:\n dotfiles:\n os: macos")
|
||||||
|
|
||||||
|
# Should load from dotfiles
|
||||||
|
manifest = load_manifest()
|
||||||
|
assert "dotfiles" in manifest.get("profiles", {})
|
||||||
|
assert "local" not in manifest.get("profiles", {})
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_fallback_to_local(mock_paths):
|
||||||
|
"""Test fallback to local manifest when dotfiles doesn't exist."""
|
||||||
|
local_manifest = mock_paths["local_manifest"]
|
||||||
|
local_manifest.write_text("profiles:\n local:\n os: linux")
|
||||||
|
|
||||||
|
# Dotfiles manifest doesn't exist
|
||||||
|
manifest = load_manifest()
|
||||||
|
assert "local" in manifest.get("profiles", {})
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_manifest_empty_when_none_exist(mock_paths):
|
||||||
|
"""Test empty dict returned when no manifests exist."""
|
||||||
|
manifest = load_manifest()
|
||||||
|
assert manifest == {}
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_priority_dotfiles_first(mock_paths):
|
||||||
|
"""Test that dotfiles config takes priority over local."""
|
||||||
|
local_config = mock_paths["local_config"]
|
||||||
|
dotfiles_config = mock_paths["dotfiles_config"]
|
||||||
|
|
||||||
|
# Create local config
|
||||||
|
local_config.write_text(
|
||||||
|
"[repository]\n"
|
||||||
|
"dotfiles_url = https://github.com/user/dotfiles-local.git\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create dotfiles config
|
||||||
|
dotfiles_config.parent.mkdir(parents=True)
|
||||||
|
dotfiles_config.write_text(
|
||||||
|
"[repository]\n"
|
||||||
|
"dotfiles_url = https://github.com/user/dotfiles-from-repo.git\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should load from dotfiles
|
||||||
|
config = load_config()
|
||||||
|
assert "dotfiles-from-repo" in config.dotfiles_url
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_fallback_to_local(mock_paths):
    """Config loading falls back to the local file when no dotfiles copy exists."""
    mock_paths["local_config"].write_text(
        "[repository]\n"
        "dotfiles_url = https://github.com/user/dotfiles-local.git\n"
    )

    # No dotfiles config was created, so the local one is used.
    assert "dotfiles-local" in load_config().dotfiles_url
|
||||||
|
|
||||||
|
|
||||||
|
def test_load_config_empty_when_none_exist(mock_paths):
    """With no config files on disk, load_config returns built-in defaults."""
    cfg = load_config()

    assert cfg.dotfiles_url == ""
    assert cfg.dotfiles_branch == "main"
|
||||||
|
|
||||||
|
|
||||||
|
def test_self_hosting_workflow(tmp_path, monkeypatch):
    """End-to-end self-hosting: flow reads its own config/manifest out of dotfiles.

    Simulates a user whose dotfiles repository carries the flow package,
    links flow's own config from there, and verifies both manifest and
    config resolve against the repository copies.
    """
    home = tmp_path / "home"
    dotfiles = tmp_path / "dotfiles"
    for directory in (home, dotfiles):
        directory.mkdir()

    # The flow package as laid out inside the dotfiles repository.
    pkg = dotfiles / "flow" / ".config" / "flow"
    pkg.mkdir(parents=True)

    # Manifest shipped with the dotfiles.
    (pkg / "manifest.yaml").write_text(
        yaml.dump(
            {
                "profiles": {
                    "test-env": {
                        "os": "linux",
                        "packages": {"standard": ["git", "vim"]},
                    }
                }
            }
        )
    )

    # Config shipped with the dotfiles.
    (pkg / "config").write_text(
        "[repository]\n"
        "dotfiles_url = https://github.com/user/dotfiles.git\n"
    )

    # Point the path constants at the temporary layout.
    monkeypatch.setattr(paths_module, "DOTFILES_MANIFEST", pkg / "manifest.yaml")
    monkeypatch.setattr(paths_module, "DOTFILES_CONFIG", pkg / "config")
    monkeypatch.setattr(paths_module, "MANIFEST_FILE", home / ".config" / "devflow" / "manifest.yaml")
    monkeypatch.setattr(paths_module, "CONFIG_FILE", home / ".config" / "devflow" / "config")

    # Both loads should resolve to the dotfiles-hosted copies.
    assert "test-env" in load_manifest().get("profiles", {})
    assert "github.com/user/dotfiles.git" in load_config().dotfiles_url
|
||||||
|
|
||||||
|
|
||||||
|
def test_manifest_cascade_with_symlink(tmp_path, monkeypatch):
    """Loading still resolves correctly when the home manifest is a symlink."""
    dotfiles = tmp_path / "dotfiles"
    home_config = tmp_path / "home" / ".config" / "flow"
    pkg = dotfiles / "flow" / ".config" / "flow"

    pkg.mkdir(parents=True)
    home_config.mkdir(parents=True)

    # The real manifest lives in the dotfiles repository.
    (pkg / "manifest.yaml").write_text(
        yaml.dump({"profiles": {"from-dotfiles": {"os": "linux"}}})
    )

    # The home config holds only a symlink pointing at it.
    link = home_config / "manifest.yaml"
    link.symlink_to(pkg / "manifest.yaml")

    monkeypatch.setattr(paths_module, "DOTFILES_MANIFEST", pkg / "manifest.yaml")
    monkeypatch.setattr(paths_module, "MANIFEST_FILE", link)

    # Loading through the symlink yields the dotfiles content.
    assert "from-dotfiles" in load_manifest().get("profiles", {})
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_priority_documentation(mock_paths):
    """Executable documentation of the manifest cascade order."""
    dotfiles_manifest = mock_paths["dotfiles_manifest"]
    local_manifest = mock_paths["local_manifest"]

    # Priority 1: the self-hosted copy in the dotfiles repo wins.
    dotfiles_manifest.parent.mkdir(parents=True)
    dotfiles_manifest.write_text("profiles:\n priority-1: {}")
    assert "priority-1" in load_manifest().get("profiles", {})

    # Priority 2: with the dotfiles copy removed, the local override is used.
    dotfiles_manifest.unlink()
    local_manifest.write_text("profiles:\n priority-2: {}")
    assert "priority-2" in load_manifest().get("profiles", {})

    # Priority 3: with nothing on disk, an empty manifest is returned.
    local_manifest.unlink()
    assert load_manifest() == {}
|
||||||
310
tests/test_stow.py
Normal file
310
tests/test_stow.py
Normal file
@@ -0,0 +1,310 @@
|
|||||||
|
"""Tests for flow.core.stow — GNU Stow-style tree folding/unfolding."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from flow.core.stow import LinkOperation, LinkTree, TreeFolder
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def temp_home(tmp_path):
    """Provide a fresh, empty home directory under pytest's tmp_path."""
    path = tmp_path / "home"
    path.mkdir()
    return path
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def temp_dotfiles(tmp_path):
    """Provide a fresh, empty dotfiles repository under pytest's tmp_path."""
    path = tmp_path / "dotfiles"
    path.mkdir()
    return path
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_add_remove():
    """add_link registers a link; remove_link erases every trace of it."""
    tree = LinkTree()
    src = Path("/dotfiles/zsh/.zshrc")
    dst = Path("/home/user/.zshrc")

    tree.add_link(dst, src, "zsh", is_dir_link=False)

    # The link, its owning package, and its file-ness are all recorded.
    assert dst in tree.links
    assert tree.links[dst] == src
    assert tree.packages[dst] == "zsh"
    assert not tree.is_directory_link(dst)

    tree.remove_link(dst)

    assert dst not in tree.links
    assert dst not in tree.packages
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_directory_link():
    """is_directory_link reflects the is_dir_link flag given to add_link."""
    tree = LinkTree()
    dst = Path("/home/user/.config/nvim")

    tree.add_link(dst, Path("/dotfiles/nvim/.config/nvim"), "nvim", is_dir_link=True)

    assert tree.is_directory_link(dst)
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_can_fold_single_package():
    """A directory owned entirely by one package is foldable for that package."""
    tree = LinkTree()
    nvim_dir = Path("/home/user/.config/nvim")

    # Two file links, both belonging to the same package.
    tree.add_link(nvim_dir / "init.lua", Path("/dotfiles/nvim/.config/nvim/init.lua"), "nvim")
    tree.add_link(nvim_dir / "lua" / "config.lua", Path("/dotfiles/nvim/.config/nvim/lua/config.lua"), "nvim")

    # Everything under the directory is nvim's, so folding is allowed.
    assert tree.can_fold(nvim_dir, "nvim")
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_can_fold_multiple_packages():
    """A directory shared by several packages must not be folded."""
    tree = LinkTree()
    config_dir = Path("/home/user/.config")

    # Two entries under .config owned by different packages.
    tree.add_link(config_dir / "nvim", Path("/dotfiles/nvim/.config/nvim"), "nvim", is_dir_link=True)
    tree.add_link(config_dir / "tmux", Path("/dotfiles/tmux/.config/tmux"), "tmux", is_dir_link=True)

    # .config mixes nvim and tmux content, so folding for nvim is refused.
    assert not tree.can_fold(config_dir, "nvim")
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_from_state_old_format_rejected():
    """The pre-v2 state layout is refused outright (no backward compatibility)."""
    legacy_state = {
        "links": {
            "zsh": {
                "/home/user/.zshrc": "/dotfiles/zsh/.zshrc",
                "/home/user/.zshenv": "/dotfiles/zsh/.zshenv",
            }
        }
    }

    with pytest.raises(RuntimeError, match="Unsupported linked state format"):
        LinkTree.from_state(legacy_state)
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_from_state_new_format():
    """A version-2 state (per-link is_directory_link records) loads correctly."""
    state = {
        "version": 2,
        "links": {
            "nvim": {
                "/home/user/.config/nvim": {
                    "source": "/dotfiles/nvim/.config/nvim",
                    "is_directory_link": True,
                }
            }
        },
    }

    tree = LinkTree.from_state(state)
    dst = Path("/home/user/.config/nvim")

    assert dst in tree.links
    assert tree.is_directory_link(dst)
    assert tree.packages[dst] == "nvim"
|
||||||
|
|
||||||
|
|
||||||
|
def test_linktree_to_state():
    """to_state serializes links grouped by package with directory flags."""
    tree = LinkTree()
    tree.add_link(
        Path("/home/user/.config/nvim"),
        Path("/dotfiles/nvim/.config/nvim"),
        "nvim",
        is_dir_link=True,
    )
    tree.add_link(
        Path("/home/user/.zshrc"),
        Path("/dotfiles/zsh/.zshrc"),
        "zsh",
        is_dir_link=False,
    )

    state = tree.to_state()

    assert state["version"] == 2
    assert "nvim" in state["links"]
    assert "zsh" in state["links"]

    # Directory-ness round-trips per link.
    assert state["links"]["nvim"]["/home/user/.config/nvim"]["is_directory_link"] is True
    assert state["links"]["zsh"]["/home/user/.zshrc"]["is_directory_link"] is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_plan_link_simple(temp_home, temp_dotfiles):
    """Linking a single file plans exactly one create_symlink operation."""
    folder = TreeFolder(LinkTree())

    src = temp_dotfiles / "zsh" / ".zshrc"
    dst = temp_home / ".zshrc"

    # Materialize the source file in the dotfiles tree.
    src.parent.mkdir(parents=True)
    src.write_text("# zshrc")

    ops = folder.plan_link(src, dst, "zsh")

    assert len(ops) == 1
    op = ops[0]
    assert op.type == "create_symlink"
    assert op.source == src
    assert op.target == dst
    assert op.package == "zsh"
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_detect_conflicts_existing_file(temp_home, temp_dotfiles):
    """An unmanaged file already at the target path is reported as a conflict."""
    folder = TreeFolder(LinkTree())

    src = temp_dotfiles / "zsh" / ".zshrc"
    dst = temp_home / ".zshrc"

    # Something already occupies the target path.
    dst.parent.mkdir(parents=True, exist_ok=True)
    dst.write_text("# existing")

    src.parent.mkdir(parents=True)
    src.write_text("# zshrc")

    conflicts = folder.detect_conflicts(folder.plan_link(src, dst, "zsh"))

    assert len(conflicts) == 1
    assert "already exists" in conflicts[0]
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_detect_conflicts_different_package(temp_home, temp_dotfiles):
    """A target already linked by another package is flagged with that package's name."""
    tree = LinkTree()
    dst = temp_home / ".bashrc"

    # The target is already claimed by the "bash" package.
    tree.add_link(dst, Path("/dotfiles/bash/.bashrc"), "bash")

    src = temp_dotfiles / "zsh" / ".bashrc"
    src.parent.mkdir(parents=True)
    src.write_text("# bashrc")

    folder = TreeFolder(tree)
    conflicts = folder.detect_conflicts(folder.plan_link(src, dst, "zsh"))

    assert len(conflicts) == 1
    assert "bash" in conflicts[0]
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_execute_operations_dry_run(temp_home, temp_dotfiles, capsys):
    """Dry-run prints the planned operations without touching the filesystem."""
    folder = TreeFolder(LinkTree())

    src = temp_dotfiles / "zsh" / ".zshrc"
    dst = temp_home / ".zshrc"
    src.parent.mkdir(parents=True)
    src.write_text("# zshrc")

    folder.execute_operations(folder.plan_link(src, dst, "zsh"), dry_run=True)

    # The plan is described on stdout...
    out = capsys.readouterr().out
    assert "FILE LINK" in out
    assert str(dst) in out

    # ...but no symlink is actually created.
    assert not dst.exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_execute_operations_create_symlink(temp_home, temp_dotfiles):
    """Executing for real creates the symlink and records it in the tree."""
    folder = TreeFolder(LinkTree())

    src = temp_dotfiles / "zsh" / ".zshrc"
    dst = temp_home / ".zshrc"
    src.parent.mkdir(parents=True)
    src.write_text("# zshrc")

    folder.execute_operations(folder.plan_link(src, dst, "zsh"), dry_run=False)

    # A real symlink now points at the source...
    assert dst.is_symlink()
    assert dst.resolve() == src.resolve()

    # ...and the in-memory tree reflects it.
    assert dst in folder.tree.links
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_plan_unlink(temp_home, temp_dotfiles):
    """Unlinking a tracked file plans a single remove operation."""
    tree = LinkTree()
    dst = temp_home / ".zshrc"
    tree.add_link(dst, temp_dotfiles / "zsh" / ".zshrc", "zsh")

    ops = TreeFolder(tree).plan_unlink(dst, "zsh")

    assert len(ops) == 1
    assert ops[0].type == "remove"
    assert ops[0].target == dst
|
||||||
|
|
||||||
|
|
||||||
|
def test_treefolder_plan_unlink_directory_link(temp_home, temp_dotfiles):
    """Unlinking a directory symlink ends with a remove op flagged as a dir link."""
    tree = LinkTree()
    dst = temp_home / ".config" / "nvim"
    tree.add_link(dst, temp_dotfiles / "nvim" / ".config" / "nvim", "nvim", is_dir_link=True)

    ops = TreeFolder(tree).plan_unlink(dst, "nvim")

    # At least one operation, with the directory-link removal last.
    assert len(ops) >= 1
    final = ops[-1]
    assert final.type == "remove"
    assert final.is_directory_link
|
||||||
|
|
||||||
|
|
||||||
|
def test_linkoperation_str():
    """__str__ labels file links, directory links, and unfolds distinctly."""
    def make(op_type, **extra):
        # Helper: all operations share the same dummy paths and package.
        return LinkOperation(
            type=op_type,
            source=Path("/src"),
            target=Path("/dst"),
            package="test",
            **extra,
        )

    assert "FILE LINK" in str(make("create_symlink", is_directory_link=False))
    assert "DIR LINK" in str(make("create_symlink", is_directory_link=True))
    assert "UNFOLD" in str(make("unfold"))
|
||||||
52
tests/test_variables.py
Normal file
52
tests/test_variables.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
"""Tests for flow.core.variables."""
|
||||||
|
|
||||||
|
from flow.core.variables import substitute, substitute_template
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_dollar():
    """A bare $NAME reference is replaced from the provided mapping."""
    assert substitute("hello $NAME", {"NAME": "world"}) == "hello world"
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_braces():
    """The braced ${NAME} form is replaced from the provided mapping."""
    assert substitute("hello ${NAME}", {"NAME": "world"}) == "hello world"
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_multiple():
    """Both reference styles substitute correctly within one string."""
    assert substitute("$A and ${B}", {"A": "1", "B": "2"}) == "1 and 2"
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_home():
    """$HOME is resolved even when the explicit mapping is empty."""
    resolved = substitute("dir=$HOME", {})
    assert "$HOME" not in resolved
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_user():
    """$USER is filled in from the process environment."""
    import os

    expected = f"u={os.getenv('USER', '')}"
    assert substitute("u=$USER", {}) == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_non_string():
    """Non-string values pass through substitute untouched."""
    assert substitute(123, {}) == 123
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_template_basic():
    """{{key}} placeholders are filled from the variables mapping."""
    rendered = substitute_template(
        "nvim-{{os}}-{{arch}}.tar.gz",
        {"os": "linux", "arch": "x86_64"},
    )
    assert rendered == "nvim-linux-x86_64.tar.gz"
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_template_missing_key():
    """Unknown placeholders are left verbatim rather than erased."""
    assert substitute_template("{{missing}}", {}) == "{{missing}}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_template_non_string():
    """Non-string inputs are returned unchanged by substitute_template."""
    assert substitute_template(42, {}) == 42
|
||||||
|
|
||||||
|
|
||||||
|
def test_substitute_template_no_placeholders():
    """Text without any {{...}} placeholders is returned as-is."""
    assert substitute_template("plain text", {"os": "linux"}) == "plain text"
|
||||||
Reference in New Issue
Block a user