Files
Billy D. 202b4e1d61 feat: scaffold avatar pipeline with ComfyUI driver, MLflow logging, and rclone promotion
- setup.sh: automated desktop env setup (ComfyUI, 3D-Pack, UniRig, Blender, Ray)
- ray-join.sh: join Ray cluster as external worker with 3d_gen resource label
- vrm_export.py: headless Blender GLB→VRM conversion script
- generate.py: ComfyUI API driver (submit workflow JSON, poll, download outputs)
- log_mlflow.py: REST-only MLflow experiment tracking (no SDK dependency)
- promote.py: rclone promotion of VRM files to gravenhollow S3
- CLI entry points: avatar-generate, avatar-promote
- workflows/ placeholder for ComfyUI exported workflow JSONs

Implements ADR-0063 (ComfyUI + TRELLIS + UniRig 3D avatar pipeline)
2026-02-24 05:44:04 -05:00

132 lines
3.6 KiB
Python

"""Promote VRM files to gravenhollow via rclone.
Usage:
avatar-promote exports/Silver-Mage.vrm
avatar-promote exports/Silver-Mage.vrm --bucket companion-avatars
avatar-promote --dry-run exports/*.vrm
"""
from __future__ import annotations
import argparse
import logging
import shutil
import subprocess
import sys
from pathlib import Path
logger = logging.getLogger(__name__)
DEFAULT_REMOTE = "gravenhollow"
DEFAULT_BUCKET = "avatar-models"
def check_rclone() -> bool:
    """Return True when rclone is installed and the expected remote exists.

    Logs an error and returns False when the binary is missing or when the
    ``DEFAULT_REMOTE`` remote has not been configured.
    """
    if shutil.which("rclone") is None:
        logger.error("rclone not found. Install: sudo pacman -S rclone")
        return False

    # `rclone listremotes` prints one "name:" entry per line.
    listing = subprocess.run(
        ["rclone", "listremotes"],
        capture_output=True,
        text=True,
        check=False,
    )
    configured = listing.stdout.strip().split("\n")
    if f"{DEFAULT_REMOTE}:" in configured:
        return True

    logger.error(
        "rclone remote '%s' not configured. Run scripts/setup.sh or configure manually.",
        DEFAULT_REMOTE,
    )
    return False
def promote(
    files: list[Path],
    remote: str = DEFAULT_REMOTE,
    bucket: str = DEFAULT_BUCKET,
    dry_run: bool = False,
) -> list[str]:
    """Copy VRM/GLB/FBX files to gravenhollow S3 via rclone.

    Args:
        files: Local model files to upload. Missing files and files with
            unexpected extensions are skipped with a warning.
        remote: rclone remote name (must already be configured).
        bucket: Destination bucket/prefix on the remote.
        dry_run: Pass ``--dry-run`` to rclone so nothing is actually copied.

    Returns:
        Remote paths (``remote:bucket/name``) for each file that was promoted
        (or, under ``dry_run``, would have been).

    Raises:
        RuntimeError: If rclone is not installed or the remote is missing.
    """
    if not check_rclone():
        raise RuntimeError("rclone not available")
    promoted: list[str] = []
    for file_path in files:
        if not file_path.exists():
            logger.warning("File not found, skipping: %s", file_path)
            continue
        if file_path.suffix.lower() not in (".vrm", ".glb", ".fbx"):
            logger.warning("Unexpected file type, skipping: %s", file_path)
            continue
        dest = f"{remote}:{bucket}/{file_path.name}"
        cmd = ["rclone", "copy", str(file_path), f"{remote}:{bucket}/"]
        if dry_run:
            cmd.append("--dry-run")
        # BUG FIX: the original format string was "Promoting: %s%s", which
        # glued the local path and remote destination together with no
        # separator in the log output.
        logger.info("Promoting: %s -> %s", file_path, dest)
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            check=False,
        )
        if result.returncode != 0:
            # Log and keep going: one bad file should not abort the batch.
            logger.error("rclone failed for %s: %s", file_path, result.stderr)
            continue
        if dry_run:
            logger.info(" (dry-run) Would copy %s", file_path.name)
        else:
            logger.info(" Promoted: %s", dest)
        promoted.append(dest)
    return promoted
def main() -> None:
    """CLI entry point: parse arguments, configure logging, run promotion."""
    parser = argparse.ArgumentParser(description="Promote VRM files to gravenhollow storage")
    parser.add_argument("files", nargs="+", type=Path, help="VRM/GLB files to promote")
    parser.add_argument("--remote", default=DEFAULT_REMOTE, help="rclone remote name")
    parser.add_argument("--bucket", default=DEFAULT_BUCKET, help="S3 bucket name")
    parser.add_argument("--dry-run", action="store_true", help="Show what would be copied")
    parser.add_argument("--verbose", "-v", action="store_true")
    opts = parser.parse_args()

    log_level = logging.DEBUG if opts.verbose else logging.INFO
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s %(levelname)s %(name)s: %(message)s",
    )

    try:
        results = promote(
            files=opts.files,
            remote=opts.remote,
            bucket=opts.bucket,
            dry_run=opts.dry_run,
        )
        print(f"\nPromoted {len(results)} file(s)")
        for remote_path in results:
            print(f" {remote_path}")
    except Exception:
        # Top-level boundary: record the traceback, exit non-zero for the shell.
        logger.exception("Promotion failed")
        sys.exit(1)


if __name__ == "__main__":
    main()