426 lines
15 KiB
Python
426 lines
15 KiB
Python
"""flow dotfiles — dotfile management with GNU Stow-style symlinking."""
|
|
|
|
import json
|
|
import os
|
|
import shlex
|
|
import shutil
|
|
import subprocess
|
|
import sys
|
|
from pathlib import Path
|
|
from typing import Optional
|
|
|
|
from flow.core.config import FlowContext
|
|
from flow.core.paths import DOTFILES_DIR, LINKED_STATE
|
|
from flow.core.stow import LinkTree, TreeFolder
|
|
|
|
|
|
def register(subparsers):
    """Register the ``dotfiles`` command (alias ``dot``) and its subcommands."""
    p = subparsers.add_parser("dotfiles", aliases=["dot"], help="Manage dotfiles")
    sub = p.add_subparsers(dest="dotfiles_command")

    # init
    init = sub.add_parser("init", help="Clone dotfiles repository")
    init.add_argument("--repo", help="Override repository URL")
    init.set_defaults(handler=run_init)

    # link
    link = sub.add_parser("link", help="Create symlinks for dotfile packages")
    link.add_argument("packages", nargs="*", help="Specific packages to link (default: all)")
    link.add_argument("--profile", help="Profile to use for overrides")
    link.add_argument("--copy", action="store_true", help="Copy instead of symlink")
    link.add_argument("--force", action="store_true", help="Overwrite existing files")
    link.add_argument("--dry-run", action="store_true", help="Show what would be done")
    link.set_defaults(handler=run_link)

    # unlink
    unlink = sub.add_parser("unlink", help="Remove dotfile symlinks")
    unlink.add_argument("packages", nargs="*", help="Specific packages to unlink (default: all)")
    unlink.set_defaults(handler=run_unlink)

    # status
    status = sub.add_parser("status", help="Show dotfiles link status")
    status.set_defaults(handler=run_status)

    # sync
    sync = sub.add_parser("sync", help="Pull latest dotfiles from remote")
    sync.set_defaults(handler=run_sync)

    # relink
    relink = sub.add_parser("relink", help="Refresh symlinks after changes")
    relink.add_argument("packages", nargs="*", help="Specific packages to relink (default: all)")
    relink.add_argument("--profile", help="Profile to use for overrides")
    # BUG FIX: relink delegates to run_link, which reads args.copy, args.force
    # and args.dry_run. Those flags are not defined on this subparser, so
    # default them here to avoid an AttributeError on 'flow dotfiles relink'.
    relink.set_defaults(handler=run_relink, copy=False, force=False, dry_run=False)

    # clean
    clean = sub.add_parser("clean", help="Remove broken symlinks")
    clean.add_argument("--dry-run", action="store_true", help="Show what would be done")
    clean.set_defaults(handler=run_clean)

    # edit
    edit = sub.add_parser("edit", help="Edit package config with auto-commit")
    edit.add_argument("package", help="Package name to edit")
    edit.add_argument("--no-commit", action="store_true", help="Skip auto-commit")
    edit.set_defaults(handler=run_edit)

    # Bare 'flow dotfiles' prints help instead of doing nothing.
    p.set_defaults(handler=lambda ctx, args: p.print_help())
|
|
|
|
|
|
def _load_state() -> dict:
    """Load the linked-files state from LINKED_STATE.

    Returns an empty skeleton ({"links": {}}) when no state file exists yet.
    """
    if not LINKED_STATE.exists():
        return {"links": {}}
    with open(LINKED_STATE) as fh:
        return json.load(fh)
|
|
|
|
|
|
def _save_state(state: dict):
    """Persist *state* to LINKED_STATE as indented JSON, creating parent dirs."""
    LINKED_STATE.parent.mkdir(parents=True, exist_ok=True)
    LINKED_STATE.write_text(json.dumps(state, indent=2))
|
|
|
|
|
|
def _discover_packages(dotfiles_dir: Path, profile: Optional[str] = None) -> dict:
    """Discover packages from common/ and optionally profiles/<name>/.

    Returns {package_name: source_dir} with profile dirs taking precedence.
    """

    def _visible_subdirs(parent: Path):
        # Yield non-hidden subdirectories of *parent* in sorted name order.
        if not parent.is_dir():
            return
        for entry in sorted(parent.iterdir()):
            if entry.is_dir() and not entry.name.startswith("."):
                yield entry

    packages = {d.name: d for d in _visible_subdirs(dotfiles_dir / "common")}
    if profile:
        # Same-named profile packages override their common counterparts.
        for d in _visible_subdirs(dotfiles_dir / "profiles" / profile):
            packages[d.name] = d
    return packages
|
|
|
|
|
|
def _walk_package(source_dir: Path, home: Path):
    """Yield (source_file, target_file) pairs for a package directory.

    Files in the package directory map relative to $HOME.
    """
    for dirpath, _subdirs, filenames in os.walk(source_dir):
        base = Path(dirpath)
        for name in filenames:
            source = base / name
            # The package-relative path mirrors the layout under $HOME.
            yield source, home / source.relative_to(source_dir)
|
|
|
|
|
|
def run_init(ctx: FlowContext, args):
|
|
repo_url = args.repo or ctx.config.dotfiles_url
|
|
if not repo_url:
|
|
ctx.console.error("No dotfiles repository URL. Set it in config or pass --repo.")
|
|
sys.exit(1)
|
|
|
|
if DOTFILES_DIR.exists():
|
|
ctx.console.warn(f"Dotfiles directory already exists: {DOTFILES_DIR}")
|
|
return
|
|
|
|
DOTFILES_DIR.parent.mkdir(parents=True, exist_ok=True)
|
|
branch = ctx.config.dotfiles_branch
|
|
cmd = ["git", "clone", "-b", branch, repo_url, str(DOTFILES_DIR)]
|
|
ctx.console.info(f"Cloning {repo_url} (branch: {branch})...")
|
|
subprocess.run(cmd, check=True)
|
|
ctx.console.success(f"Dotfiles cloned to {DOTFILES_DIR}")
|
|
|
|
|
|
def run_link(ctx: FlowContext, args):
    """Symlink (or, with --copy, copy) dotfile packages into $HOME.

    Packages come from common/ plus optional --profile overrides; positional
    package names restrict the selection. Symlink mode plans all operations
    through LinkTree/TreeFolder, then runs conflict detection over the whole
    plan before anything touches disk. --force removes conflicting unmanaged
    targets; --dry-run only prints the plan.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    home = Path.home()
    packages = _discover_packages(DOTFILES_DIR, args.profile)

    # Filter to requested packages
    if args.packages:
        packages = {k: v for k, v in packages.items() if k in args.packages}
        missing = set(args.packages) - set(packages.keys())
        if missing:
            # Unknown names only warn; we proceed with whatever matched.
            ctx.console.warn(f"Packages not found: {', '.join(missing)}")
        if not packages:
            ctx.console.error("No valid packages selected")
            sys.exit(1)

    # Build current link tree from state
    state = _load_state()
    try:
        tree = LinkTree.from_state(state)
    except RuntimeError as e:
        # State file is unreadable/incompatible; bail rather than guess.
        ctx.console.error(str(e))
        sys.exit(1)
    folder = TreeFolder(tree)

    all_operations = []  # planned stow operations (symlink mode only)
    copied_count = 0     # files actually copied (copy mode, non-dry-run)

    for pkg_name, source_dir in packages.items():
        ctx.console.info(f"[{pkg_name}]")
        for src, dst in _walk_package(source_dir, home):
            if args.copy:
                # Copy mode bypasses the stow planner entirely: each file is
                # handled (and reported) immediately, one at a time.
                if dst.exists() or dst.is_symlink():
                    if not args.force:
                        ctx.console.warn(f" Skipped (exists): {dst}")
                        continue
                    if dst.is_dir() and not dst.is_symlink():
                        # Refuse to delete a real directory tree, even --force.
                        ctx.console.error(f"Cannot overwrite directory with --copy: {dst}")
                        continue
                    if not args.dry_run:
                        dst.unlink()

                if args.dry_run:
                    print(f" COPY: {src} -> {dst}")
                else:
                    dst.parent.mkdir(parents=True, exist_ok=True)
                    shutil.copy2(src, dst)
                    print(f" Copied: {src} -> {dst}")
                    copied_count += 1
                continue

            # Symlink mode: accumulate planned operations; nothing touches
            # disk until after conflict detection below.
            ops = folder.plan_link(src, dst, pkg_name)
            all_operations.extend(ops)

    if args.copy:
        # Copy mode already did its work (or printed the plan) above.
        if args.dry_run:
            return
        ctx.console.success(f"Copied {copied_count} item(s)")
        return

    # Conflict detection (two-phase): plan everything first, then vet the
    # whole plan so a conflict never leaves a half-linked package behind.
    conflicts = folder.detect_conflicts(all_operations)
    if conflicts and not args.force:
        for conflict in conflicts:
            ctx.console.error(conflict)
        ctx.console.error("\nUse --force to overwrite existing files")
        sys.exit(1)

    # Handle force mode: remove conflicting targets
    if args.force and not args.dry_run:
        for op in all_operations:
            if op.type != "create_symlink":
                continue
            if not (op.target.exists() or op.target.is_symlink()):
                continue
            if op.target in tree.links:
                # Target is already tracked in our link tree — leave it to
                # the planner's own operations.
                continue
            if op.target.is_dir() and not op.target.is_symlink():
                # A real (non-symlink) directory is never force-removed.
                ctx.console.error(f"Cannot overwrite directory with --force: {op.target}")
                sys.exit(1)
            op.target.unlink()

    # Execute operations
    if args.dry_run:
        ctx.console.info("\nPlanned operations:")
        for op in all_operations:
            print(str(op))
    else:
        folder.execute_operations(all_operations, dry_run=False)
        # Persist the updated link tree so unlink/status/clean can find it.
        state = folder.to_state()
        _save_state(state)
        ctx.console.success(f"Linked {len(all_operations)} item(s)")
|
|
|
|
|
|
def run_unlink(ctx: FlowContext, args):
    """Remove recorded symlinks for the given packages (default: all)."""
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles found.")
        return

    selected = list(args.packages) if args.packages else list(links_by_package.keys())
    removed = 0

    for pkg_name in selected:
        pkg_links = links_by_package.get(pkg_name, {})
        if not pkg_links:
            continue

        ctx.console.info(f"[{pkg_name}]")
        for target in [Path(s) for s in pkg_links.keys()]:
            if target.is_symlink():
                target.unlink()
                print(f" Removed: {target}")
                removed += 1
            elif target.exists():
                # A real file/dir now sits where our link was; leave it be.
                ctx.console.warn(f" Not a symlink, skipping: {target}")
            else:
                print(f" Already gone: {target}")

        # Drop the whole package record once processed.
        links_by_package.pop(pkg_name, None)

    _save_state(state)
    ctx.console.success(f"Removed {removed} symlink(s)")
|
|
|
|
|
|
def run_status(ctx: FlowContext, args):
    """Report the health of every symlink recorded in the link state."""
    state = _load_state()
    links_by_package = state.get("links", {})
    if not links_by_package:
        ctx.console.info("No linked dotfiles.")
        return

    for pkg_name, links in links_by_package.items():
        ctx.console.info(f"[{pkg_name}]")
        for dst_str, link_info in links.items():
            dst = Path(dst_str)

            # A non-dict (or source-less) entry means the state file predates
            # the current format; we cannot tell where links should point.
            if not isinstance(link_info, dict) or "source" not in link_info:
                ctx.console.error(
                    "Unsupported linked state format. Remove linked.json and relink dotfiles."
                )
                sys.exit(1)

            src_str = link_info["source"]
            link_type = "FOLDED" if link_info.get("is_directory_link", False) else "OK"

            if not dst.is_symlink():
                # Either something replaced the link, or it vanished entirely.
                print(f" NOT SYMLINK: {dst}" if dst.exists() else f" BROKEN: {dst} (missing)")
                continue

            actual = os.readlink(dst)
            # Accept either a verbatim target match or a resolved-path match.
            matches = actual == src_str or str(dst.resolve()) == str(Path(src_str).resolve())
            if matches:
                print(f" {link_type}: {dst} -> {src_str}")
            else:
                print(f" CHANGED: {dst} -> {actual} (expected {src_str})")
|
|
|
|
|
|
def run_sync(ctx: FlowContext, args):
|
|
if not DOTFILES_DIR.exists():
|
|
ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
|
|
sys.exit(1)
|
|
|
|
ctx.console.info("Pulling latest dotfiles...")
|
|
result = subprocess.run(
|
|
["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
|
|
capture_output=True, text=True,
|
|
)
|
|
if result.returncode == 0:
|
|
if result.stdout.strip():
|
|
print(result.stdout.strip())
|
|
ctx.console.success("Dotfiles synced.")
|
|
else:
|
|
ctx.console.error(f"Git pull failed: {result.stderr.strip()}")
|
|
sys.exit(1)
|
|
|
|
|
|
def run_relink(ctx: FlowContext, args):
    """Refresh symlinks after changes (unlink + link).

    Delegates to run_unlink followed by run_link. The relink subparser does
    not expose the --copy/--force/--dry-run flags that run_link reads, so
    they are backfilled here with False defaults.
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    # BUG FIX: run_link accesses args.copy, args.force and args.dry_run,
    # which the relink subcommand does not define — without these defaults
    # 'flow dotfiles relink' raised AttributeError.
    for flag in ("copy", "force", "dry_run"):
        if not hasattr(args, flag):
            setattr(args, flag, False)

    # First unlink
    ctx.console.info("Unlinking current symlinks...")
    run_unlink(ctx, args)

    # Then link again
    ctx.console.info("Relinking with updated configuration...")
    run_link(ctx, args)
|
|
|
|
|
|
def run_clean(ctx: FlowContext, args):
    """Remove broken symlinks recorded in the link state.

    A link is "broken" when the path is still a symlink but its target no
    longer exists. With --dry-run, reports what would be removed without
    touching the filesystem or the saved state.
    """
    state = _load_state()
    if not state.get("links"):
        ctx.console.info("No linked dotfiles found.")
        return

    removed = 0
    for pkg_name, links in state["links"].items():
        # Snapshot the keys: we delete entries from `links` while looping.
        for dst_str in list(links.keys()):
            dst = Path(dst_str)

            # Check if symlink is broken (is_symlink() is True even when the
            # target is gone; exists() follows the link).
            if dst.is_symlink() and not dst.exists():
                if args.dry_run:
                    print(f"Would remove broken symlink: {dst}")
                else:
                    dst.unlink()
                    print(f"Removed broken symlink: {dst}")
                    # Only drop the state entry when we actually removed it;
                    # a dry run must leave state untouched.
                    del links[dst_str]
                removed += 1

    if not args.dry_run:
        _save_state(state)

    if removed > 0:
        if args.dry_run:
            # BUG FIX: don't claim links were cleaned during a dry run.
            ctx.console.info(f"Would clean {removed} broken symlink(s)")
        else:
            ctx.console.success(f"Cleaned {removed} broken symlink(s)")
    else:
        ctx.console.info("No broken symlinks found")
|
|
|
|
|
|
def run_edit(ctx: FlowContext, args):
    """Edit package config with auto-commit workflow.

    Locates the package under common/ (preferred) or any profiles/<name>/
    directory, pulls the latest changes, opens $EDITOR on the package
    directory, then commits any resulting repo changes and offers to push
    (commit is skipped with --no-commit).
    """
    if not DOTFILES_DIR.exists():
        ctx.console.error(f"Dotfiles not found at {DOTFILES_DIR}. Run 'flow dotfiles init' first.")
        sys.exit(1)

    package_name = args.package

    # Find package directory
    common_dir = DOTFILES_DIR / "common" / package_name
    profile_dirs = list((DOTFILES_DIR / "profiles").glob(f"*/{package_name}"))

    package_dir = None
    if common_dir.exists():
        package_dir = common_dir
    elif profile_dirs:
        # NOTE(review): if several profiles define this package, the first
        # glob match wins — the choice is filesystem-order dependent.
        package_dir = profile_dirs[0]
    else:
        ctx.console.error(f"Package not found: {package_name}")
        sys.exit(1)

    # Git pull before editing so the session starts from the latest remote
    # state; a failed pull only warns and editing proceeds anyway.
    ctx.console.info("Pulling latest changes...")
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "pull", "--rebase"],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        ctx.console.warn(f"Git pull failed: {result.stderr.strip()}")

    # Open editor on the package directory. shlex.split supports editor
    # values with flags, e.g. EDITOR="code --wait".
    editor = os.environ.get("EDITOR", "vim")
    ctx.console.info(f"Opening {package_dir} in {editor}...")
    edit_result = subprocess.run(shlex.split(editor) + [str(package_dir)])
    if edit_result.returncode != 0:
        ctx.console.warn(f"Editor exited with status {edit_result.returncode}")

    # Check for changes: 'status --porcelain' prints nothing when the
    # working tree is clean.
    result = subprocess.run(
        ["git", "-C", str(DOTFILES_DIR), "status", "--porcelain"],
        capture_output=True, text=True,
    )

    if result.stdout.strip() and not args.no_commit:
        # Auto-commit changes. Note 'git add .' stages everything in the
        # repo, not only this package's files.
        ctx.console.info("Changes detected, committing...")
        subprocess.run(["git", "-C", str(DOTFILES_DIR), "add", "."], check=True)
        subprocess.run(
            ["git", "-C", str(DOTFILES_DIR), "commit", "-m", f"Update {package_name} config"],
            check=True,
        )

        # Ask before pushing. Only an exact 'n'/'N' declines; any other
        # answer (including plain Enter) pushes.
        response = input("Push changes to remote? [Y/n] ")
        if response.lower() != "n":
            subprocess.run(["git", "-C", str(DOTFILES_DIR), "push"], check=True)
            ctx.console.success("Changes committed and pushed")
        else:
            ctx.console.info("Changes committed locally (not pushed)")
    elif result.stdout.strip() and args.no_commit:
        ctx.console.info("Changes detected; skipped commit (--no-commit)")
    else:
        ctx.console.info("No changes to commit")
|