#!/usr/bin/env python3
"""
auto_merge.py - 자동 머지 처리 스크립트

.done 파일을 스캔하여 merge_needed=True인 작업을 자동으로 머지하고
테스트를 실행한 후 결과를 로깅합니다.

사용법:
    python3 scripts/auto_merge.py                        # 전체 자동 실행
    python3 scripts/auto_merge.py --task-id task-391.1   # 특정 task만
    python3 scripts/auto_merge.py --dry-run              # 드라이런
    python3 scripts/auto_merge.py --force-merge task-391.1  # 강제 머지
"""

import argparse
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any

# report_parser.py import (workspace 루트에 위치)
_ws_root = str(Path(__file__).parent.parent)
if _ws_root not in sys.path:
    sys.path.insert(0, _ws_root)
from report_parser import parse_report  # pyright: ignore[reportMissingImports]

try:
    from config.loader import ConfigManager as _CfgMgr  # pyright: ignore[reportMissingImports]
except ImportError:
    # fallback: WORKSPACE_ROOT 환경변수 기반
    _env_root = os.environ.get("WORKSPACE_ROOT", _ws_root)
    if _env_root not in sys.path:
        sys.path.insert(0, _env_root)
    from config.loader import ConfigManager as _CfgMgr  # pyright: ignore[reportMissingImports]

# ---------------------------------------------------------------------------
# 로깅 설정
# ---------------------------------------------------------------------------

# Log directory: $WORKSPACE_ROOT/logs, falling back to this script's parent
# directory. Created eagerly at import time so the file handler can open
# its log file immediately.
LOGS_DIR = Path(os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent))) / "logs"
LOGS_DIR.mkdir(parents=True, exist_ok=True)

# Korea Standard Time (UTC+9) — every timestamp in this script uses KST.
_KST = timezone(timedelta(hours=9))


def _setup_logger(name: str = "auto_merge") -> logging.Logger:
    """Build the shared logger: DEBUG into a per-day file, INFO to stdout."""
    log = logging.getLogger(name)
    if log.handlers:
        # Already configured (re-entry / re-import) — reuse as-is.
        return log

    log.setLevel(logging.DEBUG)

    date_tag = datetime.now(_KST).strftime("%Y-%m-%d")
    fmt = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s", datefmt="%Y-%m-%dT%H:%M:%S")

    file_handler = logging.FileHandler(LOGS_DIR / f"auto_merge_{date_tag}.log", encoding="utf-8")
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(fmt)
    log.addHandler(file_handler)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(fmt)
    log.addHandler(console_handler)

    return log


logger = _setup_logger()

# ---------------------------------------------------------------------------
# AutoMerger 클래스
# ---------------------------------------------------------------------------


class AutoMerger:
    """자동 머지 처리기."""

    def __init__(
        self,
        workspace_path: str = os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent)),
        dry_run: bool = False,
        force_task_id: str | None = None,
        target_task_id: str | None = None,
    ) -> None:
        self.workspace = Path(workspace_path)
        self.events_dir = self.workspace / "memory" / "events"
        self.reports_dir = self.workspace / "memory" / "reports"
        self.merge_log = self.workspace / "memory" / "merge-log.json"
        self.dry_run = dry_run
        self.force_task_id = force_task_id
        self.target_task_id = target_task_id

    # -----------------------------------------------------------------------
    # scan_done_files
    # -----------------------------------------------------------------------

    def scan_done_files(self) -> list[Path]:
        """Collect unprocessed .done files (no .done.clear / .done.merging sibling)."""
        if not self.events_dir.exists():
            logger.debug(f"events_dir 없음: {self.events_dir}")
            return []

        pending = [
            candidate
            for candidate in sorted(self.events_dir.glob("*.done"))
            # skip files already reported (.done.clear) or mid-merge (.done.merging)
            if not candidate.with_suffix(".done.clear").exists()
            and not candidate.with_suffix(".done.merging").exists()
        ]

        logger.info(f"미처리 .done 파일 {len(pending)}개 발견")
        return pending

    # -----------------------------------------------------------------------
    # try_claim
    # -----------------------------------------------------------------------

    def try_claim(self, done_file: Path) -> bool:
        """Claim a .done file by atomically creating its .done.merging marker.

        Returns:
            True when this process won the claim; False when another worker
            already holds it (marker file exists).
        """
        marker = done_file.with_suffix(".done.merging")
        payload = json.dumps(
            {
                "claimed_at": datetime.now(_KST).isoformat(),
                "claimed_by": "auto_merge.py",
            }
        )
        try:
            # mode "x" fails atomically when the marker already exists
            with open(marker, "x", encoding="utf-8") as fh:
                fh.write(payload)
        except FileExistsError:
            logger.debug(f"이미 선점됨: {marker.name}")
            return False
        logger.debug(f"선점 성공: {marker.name}")
        return True

    # -----------------------------------------------------------------------
    # parse_done
    # -----------------------------------------------------------------------

    def parse_done(self, done_file: Path) -> dict[str, Any]:
        """JSON 파싱 → task_id, team_id 등 추출.

        비정상 JSON → escalate 후 ValueError 발생.
        """
        try:
            raw = json.loads(done_file.read_text(encoding="utf-8"))
        except json.JSONDecodeError as exc:
            reason = f"JSON 파싱 실패: {done_file.name} - {exc}"
            logger.error(reason)
            # task_id를 파일명에서 추출 시도
            task_id = done_file.stem  # e.g. "task-bad"
            self.escalate(task_id, reason)
            raise ValueError(f"JSON 파싱 실패: {done_file.name}") from exc

        # task_id 보정: 없으면 파일명에서 추출
        if not raw.get("task_id"):
            raw["task_id"] = done_file.stem

        # merge_needed 기본값
        raw.setdefault("merge_needed", False)

        return raw

    # -----------------------------------------------------------------------
    # analyze_report
    # -----------------------------------------------------------------------

    def analyze_report(self, task_id: str) -> dict[str, Any]:
        """Locate the task report and analyze it via report_parser.parse_report().

        Falls back to a glob search under reports_dir; when no report file
        exists at all, returns a stub dict with merge_needed=False.

        Returns:
            dict containing merge_needed, merge_branch, merge_worktree, files.
        """
        report_file = self.reports_dir / f"{task_id}.md"
        if not report_file.exists():
            # Exact name missing — accept any report whose name starts with task_id.
            matches = list(self.reports_dir.glob(f"{task_id}*.md"))
            if not matches:
                logger.warning(f"보고서 파일 없음: {task_id}")
                return {
                    "task_id": task_id,
                    "merge_needed": False,
                    "merge_branch": None,
                    "merge_worktree": None,
                    "files": [],
                }
            report_file = matches[0]

        logger.debug(f"보고서 분석: {report_file}")
        return parse_report(str(report_file))

    # -----------------------------------------------------------------------
    # resolve_project_path
    # -----------------------------------------------------------------------

    def resolve_project_path(self, task_id: str, report_data: dict[str, Any]) -> str | None:
        """Resolve the project root directory for a task.

        Resolution order:
          1. leading /home/jay/projects/<name> prefix of merge_worktree
          2. same prefix on any entry in the report's files list
          3. a /home/jay/projects/... mention in memory/tasks/{task_id}.md
        Returns None when none of the strategies match.
        """
        project_re = re.compile(r"(/home/jay/projects/[^/]+)")

        # 1. derive from the merge worktree path
        worktree = report_data.get("merge_worktree")
        if worktree:
            hit = project_re.match(worktree)
            if hit:
                project_path = hit.group(1)
                logger.debug(f"worktree에서 프로젝트 경로 추출: {project_path}")
                return project_path

        # 2. derive from the reported file paths
        for entry in report_data.get("files", []):
            hit = project_re.match(entry)
            if hit:
                project_path = hit.group(1)
                logger.debug(f"files에서 프로젝트 경로 추출: {project_path}")
                return project_path

        # 3. derive from the task meta file
        task_meta = self.workspace / "memory" / "tasks" / f"{task_id}.md"
        if task_meta.exists():
            hit = re.search(r"/home/jay/projects/([^/\s`'\"]+)", task_meta.read_text(encoding="utf-8"))
            if hit:
                project_path = f"/home/jay/projects/{hit.group(1)}"
                logger.debug(f"task 메타에서 프로젝트 경로 추출: {project_path}")
                return project_path

        logger.warning(f"프로젝트 경로를 찾을 수 없음: {task_id}")
        return None

    # -----------------------------------------------------------------------
    # execute_merge
    # -----------------------------------------------------------------------

    def execute_merge(self, project_path: str, task_id: str, team_id: str) -> dict[str, Any]:
        """Run `worktree_manager.py finish --action merge` for the task.

        Returns:
            The JSON object printed by worktree_manager.py, or
            {"raw": <stdout>} when the output is not a JSON object.

        Raises:
            RuntimeError: merge failed (non-zero exit, conflict, or timeout).
        """
        worktree_manager = Path(__file__).parent / "worktree_manager.py"
        cmd = [
            sys.executable,
            str(worktree_manager),
            "finish",
            project_path,
            task_id,
            team_id,
            "--action",
            "merge",
        ]

        logger.info(f"머지 실행: {task_id} ({team_id}) @ {project_path}")
        try:
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=120,
            )
        except subprocess.TimeoutExpired as exc:
            # Surface the timeout as RuntimeError — the exception type the
            # caller already handles. Previously TimeoutExpired escaped the
            # caller's except clause and aborted the whole processing loop,
            # leaving the .done.merging claim stuck.
            raise RuntimeError(f"머지 실패 [{task_id}]: timeout (120s)") from exc

        output = result.stdout.strip()
        try:
            parsed = json.loads(output) if output else {}
        except json.JSONDecodeError:
            parsed = {"raw": output}
        if not isinstance(parsed, dict):
            # Valid JSON but not an object (bare list/string/number) — wrap it
            # so the .get() calls below cannot raise AttributeError.
            parsed = {"raw": output}

        if result.returncode != 0:
            error_msg = parsed.get("message", result.stderr.strip() or output)
            raise RuntimeError(f"머지 실패 [{task_id}]: {error_msg}")

        logger.info(f"머지 성공: {task_id} → {parsed.get('branch', '?')}")
        return parsed

    # -----------------------------------------------------------------------
    # run_tests
    # -----------------------------------------------------------------------

    def run_tests(self, project_path: str) -> dict[str, Any]:
        """Run the project's test suite.

        Runner selection: pytest when it is on PATH, else `npm test` when
        package.json defines a test script, else skip (treated as pass).
        Timeout: 300 seconds.

        Returns:
            {"passed": bool, "output": str, "duration": float}
        """
        start_time = time.monotonic()
        project = Path(project_path)

        # pytest takes precedence when installed
        if shutil.which("pytest"):
            cmd = ["pytest", "--tb=short", "-q"]
            runner = "pytest"
        elif (project / "package.json").exists():
            try:
                pkg = json.loads((project / "package.json").read_text(encoding="utf-8"))
                if "test" in pkg.get("scripts", {}):
                    cmd = ["npm", "test", "--", "--ci"]
                    runner = "npm test"
                else:
                    return {"passed": True, "output": "테스트 스크립트 없음 (건너뜀)", "duration": 0.0}
            except (json.JSONDecodeError, OSError):
                return {"passed": True, "output": "package.json 파싱 실패 (건너뜀)", "duration": 0.0}
        else:
            return {"passed": True, "output": "테스트 러너 없음 (건너뜀)", "duration": 0.0}

        try:
            result = subprocess.run(
                cmd,
                cwd=project_path,
                capture_output=True,
                text=True,
                timeout=300,
            )
        except subprocess.TimeoutExpired:
            duration = time.monotonic() - start_time
            msg = f"테스트 timeout (300초 초과): {project_path}"
            logger.error(msg)
            return {"passed": False, "output": msg, "duration": duration}

        duration = time.monotonic() - start_time
        output = result.stdout + result.stderr
        # pytest exit code 5 means "no tests collected". A project without a
        # pytest suite must not count as a failure — previously this caused
        # run() to revert a perfectly good merge.
        if runner == "pytest" and result.returncode == 5:
            logger.info(f"테스트 결과 [{runner}]: PASS (no tests collected) ({duration:.1f}s)")
            return {"passed": True, "output": output, "duration": duration}

        passed = result.returncode == 0
        logger.info(f"테스트 결과 [{runner}]: {'PASS' if passed else 'FAIL'} ({duration:.1f}s)")
        return {"passed": passed, "output": output, "duration": duration}

    # -----------------------------------------------------------------------
    # revert_merge
    # -----------------------------------------------------------------------

    def revert_merge(self, project_path: str) -> bool:
        """Undo the most recent merge commit via `git reset --hard HEAD~1`.

        Returns:
            True on success; False on git error, timeout, or OS failure.
        """
        logger.warning(f"머지 revert 시도: {project_path}")
        try:
            proc = subprocess.run(
                ["git", "reset", "--hard", "HEAD~1"],
                cwd=project_path,
                capture_output=True,
                text=True,
                timeout=30,
            )
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"머지 revert 예외: {exc}")
            return False

        if proc.returncode != 0:
            logger.error(f"머지 revert 실패: {proc.stderr.strip()}")
            return False
        logger.info(f"머지 revert 성공: {project_path}")
        return True

    # -----------------------------------------------------------------------
    # escalate
    # -----------------------------------------------------------------------

    def escalate(self, task_id: str, reason: str) -> None:
        """Notify the operator (Anu) of a failure via a one-shot cokacdir cron.

        Loads CHAT_ID / KEY from .env.keys (config fallback for chat id) and
        schedules the notification one minute in the future. Missing
        credentials are logged and the escalation is skipped.
        """
        logger.warning(f"에스컬레이션: {task_id} - {reason}")

        env_keys = self._load_env_keys()
        chat_id = env_keys.get("COKACDIR_CHAT_ID") or _CfgMgr.get_instance().get_constant("chat_id")
        anu_key = env_keys.get("COKACDIR_KEY_ANU")

        if not anu_key:
            logger.error("COKACDIR_KEY_ANU 미설정 - 에스컬레이션 건너뜀")
            return
        if not chat_id:
            # A None chat id would put None into the argv list and make
            # subprocess.run raise TypeError, which the except clause below
            # does not catch.
            logger.error("chat_id 미설정 - 에스컬레이션 건너뜀")
            return

        prompt = (
            f"[auto_merge] {task_id} 처리 중 문제 발생 - 수동 확인 필요.\n"
            f"사유: {reason}\n"
            f"시각: {datetime.now(_KST).strftime('%Y-%m-%dT%H:%M:%S+09:00')}"
        )

        # Absolute trigger time one minute from now (KST).
        at_time = (datetime.now(_KST) + timedelta(minutes=1)).strftime("%Y-%m-%dT%H:%M:%S")

        cmd = [
            "cokacdir",
            "--cron",
            prompt,
            "--at",
            at_time,
            "--chat",
            chat_id,
            "--key",
            anu_key,
            "--once",
        ]

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"cokacdir 실행 예외: {exc}")
            return
        if result.returncode == 0:
            logger.info(f"에스컬레이션 등록 완료: {task_id}")
        else:
            logger.warning(f"cokacdir 등록 실패: {result.stderr.strip()}")

    # -----------------------------------------------------------------------
    # notify_anu
    # -----------------------------------------------------------------------

    def notify_anu(self, task_id: str) -> None:
        """Schedule a completion notification to Anu after a successful merge.

        Mirrors escalate(): credentials come from .env.keys (config fallback
        for chat id); missing credentials are logged and the notification is
        skipped.
        """
        env_keys = self._load_env_keys()
        chat_id = env_keys.get("COKACDIR_CHAT_ID") or _CfgMgr.get_instance().get_constant("chat_id")
        anu_key = env_keys.get("COKACDIR_KEY_ANU")

        if not anu_key:
            logger.error("COKACDIR_KEY_ANU 미설정 - 아누 통보 건너뜀")
            return
        if not chat_id:
            # Guard against a None chat id: subprocess.run would raise
            # TypeError (uncaught below) if None landed in the argv list.
            logger.error("chat_id 미설정 - 아누 통보 건너뜀")
            return

        instructions_path = f"{self.workspace}/scripts/completion-handler-instructions.md"
        prompt = f"{task_id} 완료 처리. {instructions_path} 읽고 task_id={task_id}로 실행하라."

        cmd = [
            "cokacdir",
            "--cron",
            prompt,
            "--at",
            "1m",
            "--chat",
            chat_id,
            "--key",
            anu_key,
            "--once",
        ]

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"아누 통보 예외: {exc}")
            return
        if result.returncode == 0:
            logger.info(f"아누 통보 등록 완료: {task_id}")
        else:
            logger.warning(f"아누 통보 등록 실패: {result.stderr.strip()}")

    # -----------------------------------------------------------------------
    # log_result
    # -----------------------------------------------------------------------

    def log_result(self, task_id: str, result: dict[str, Any]) -> None:
        """Append a result entry to merge-log.json, preserving existing entries.

        Adds timestamp/task_id to the entry when the caller omitted them.
        A corrupt or unexpectedly-shaped log file (non-dict top level, missing
        or non-list "entries") is normalized instead of raising.
        """
        # Stamp the entry when the caller omitted timestamp / task_id.
        result.setdefault("timestamp", datetime.now(_KST).isoformat())
        result.setdefault("task_id", task_id)

        # Load the existing log; tolerate a missing, corrupt, or non-dict file.
        log_data: dict[str, Any] = {"entries": []}
        if self.merge_log.exists():
            try:
                loaded = json.loads(self.merge_log.read_text(encoding="utf-8"))
            except json.JSONDecodeError:
                loaded = None
            if isinstance(loaded, dict):
                log_data = loaded

        # A valid dict may still lack "entries" (or hold a non-list) —
        # normalize rather than raising KeyError/AttributeError on append.
        if not isinstance(log_data.get("entries"), list):
            log_data["entries"] = []

        log_data["entries"].append(result)

        self.merge_log.parent.mkdir(parents=True, exist_ok=True)
        self.merge_log.write_text(json.dumps(log_data, ensure_ascii=False, indent=2), encoding="utf-8")
        logger.debug(f"merge-log.json 기록: {task_id}")

    # -----------------------------------------------------------------------
    # _load_env_keys
    # -----------------------------------------------------------------------

    @staticmethod
    def _team_id_to_short(team_id: str) -> str:
        """team_id → team_short 변환. 'dev1-team' → 'dev1', 'dev2-team' → 'dev2'.

        이미 short 형식이면 그대로 반환.
        """
        if team_id.endswith("-team"):
            return team_id[:-5]  # Remove '-team' suffix
        return team_id

    @staticmethod
    def _extract_team_short_from_report(report_data: dict[str, Any]) -> str | None:
        """보고서 데이터에서 team_short를 역추출한다.

        merge_branch: 'task/task-391.1-dev1' → 'dev1'
        merge_worktree: '/home/jay/projects/App/.worktrees/task-391.1-dev1' → 'dev1'
        """
        # merge_branch에서 추출: task/task-XXX-dev1 → dev1
        branch = report_data.get("merge_branch")
        if branch:
            m = re.search(r"-(dev\d+)$", branch)
            if m:
                return m.group(1)

        # merge_worktree에서 추출: .worktrees/task-XXX-dev1 → dev1
        worktree = report_data.get("merge_worktree")
        if worktree:
            m = re.search(r"-(dev\d+)$", worktree.rstrip("/"))
            if m:
                return m.group(1)

        return None

    def _load_env_keys(self) -> dict[str, str]:
        """.env.keys 파싱 (export KEY=VALUE 형식)."""
        env_file = self.workspace / ".env.keys"
        result: dict[str, str] = {}

        if not env_file.exists():
            logger.debug(f".env.keys 없음: {env_file}")
            return result

        for line in env_file.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            if line.startswith("export "):
                line = line[len("export ") :]
            if "=" in line:
                key, _, value = line.partition("=")
                result[key.strip()] = value.strip()

        return result

    # -----------------------------------------------------------------------
    # run
    # -----------------------------------------------------------------------

    def run(self) -> dict[str, Any]:
        """Main processing loop.

        Scans pending .done files (or the single targeted one) and, for each:
        parse -> analyze report -> claim -> resolve project -> merge -> test.
        Merge/test failures escalate to the operator; successful merges
        schedule an Anu notification. .done files are never deleted here —
        the operator converts them to .done.clear after reviewing.

        Returns:
            {"processed": N, "merged": N, "escalated": N, "skipped": N}
        """
        stats: dict[str, int] = {"processed": 0, "merged": 0, "escalated": 0, "skipped": 0}

        # Target mode: operate only on the requested task's .done file.
        if self.target_task_id:
            done_file = self.events_dir / f"{self.target_task_id}.done"
            if not done_file.exists():
                logger.warning(f"대상 .done 파일 없음: {done_file}")
                return stats
            done_files = [done_file]
        else:
            done_files = self.scan_done_files()

        for done_file in done_files:
            task_id = done_file.stem
            start_time = time.monotonic()

            # Parse before claiming so a malformed file never gets stuck
            # behind a .done.merging marker.
            try:
                done_data = self.parse_done(done_file)
            except ValueError:
                # parse_done already escalated on malformed JSON
                stats["escalated"] += 1
                continue

            actual_task_id = done_data.get("task_id", task_id)
            team_id = done_data.get("team_id", "dev1")

            # Analyze the task report; failure here escalates and finalizes.
            try:
                report_data = self.analyze_report(actual_task_id)
            except Exception as exc:
                reason = f"보고서 분석 실패: {exc}"
                logger.error(reason)
                if not self.dry_run:
                    self.escalate(actual_task_id, reason)
                stats["escalated"] += 1
                self._finalize_done_file(done_file, actual_task_id, "report_error", reason)
                continue

            # Either the report or the .done payload may request a merge.
            merge_needed: bool = report_data.get("merge_needed") or done_data.get("merge_needed", False)

            if not merge_needed:
                logger.info(f"머지 불필요, notify-completion에 위임: {actual_task_id}")
                stats["skipped"] += 1
                continue

            # Claim only when a merge is actually needed.
            # NOTE(review): in dry-run mode this still creates a real
            # .done.merging marker — confirm that is intended.
            if self.force_task_id != task_id:
                if not self.try_claim(done_file):
                    logger.info(f"이미 처리 중: {task_id}")
                    stats["skipped"] += 1
                    continue
            else:
                # Forced merge path. NOTE(review): the original comment said
                # "recreate the clear file" but the code creates a
                # .done.merging marker when no .done.clear exists — confirm
                # the intended force-merge semantics.
                clear_file = done_file.with_suffix(".done.clear")
                if not clear_file.exists():
                    self.try_claim(done_file)

            stats["processed"] += 1

            # Decide team_short: convert the .done team_id, then prefer the
            # value recovered from the report (considered more reliable).
            team_short = self._team_id_to_short(team_id)
            report_team_short = self._extract_team_short_from_report(report_data)
            if report_team_short:
                team_short = report_team_short

            # Dry-run: record what would be merged, then move on.
            if self.dry_run:
                logger.info(f"[DRY-RUN] 머지 대상: {actual_task_id} (team={team_short})")
                self.log_result(
                    actual_task_id,
                    {
                        "task_id": actual_task_id,
                        "action": "dry_run",
                        "merge_needed": True,
                        "status": "dry_run",
                    },
                )
                continue

            # Resolve the project root; without it we cannot merge.
            project_path = self.resolve_project_path(actual_task_id, report_data)
            if not project_path:
                reason = f"프로젝트 경로를 찾을 수 없음: {actual_task_id}"
                logger.error(reason)
                self.escalate(actual_task_id, reason)
                stats["escalated"] += 1
                self._finalize_done_file(done_file, actual_task_id, "no_project_path", reason)
                continue

            # Execute the merge via worktree_manager.py.
            try:
                merge_result = self.execute_merge(project_path, actual_task_id, team_short)
            except RuntimeError as exc:
                reason = str(exc)
                logger.error(f"머지 충돌/실패: {actual_task_id} - {reason}")
                self.escalate(actual_task_id, reason)
                stats["escalated"] += 1
                self.log_result(
                    actual_task_id,
                    {
                        "task_id": actual_task_id,
                        "action": "merge_failed",
                        "merge_needed": True,
                        "project": project_path,
                        "status": "error",
                        "error": reason,
                        "duration_seconds": time.monotonic() - start_time,
                    },
                )
                # .done is kept — the operator converts it to .done.clear
                # after reviewing the failure.
                continue

            # Run the project's tests against the merged result.
            test_result = self.run_tests(project_path)
            duration = time.monotonic() - start_time

            if not test_result["passed"]:
                # Test failure: undo the merge, then escalate.
                reason = f"테스트 실패 후 머지 revert: {actual_task_id}\n{test_result['output'][:500]}"
                logger.error(reason)
                self.revert_merge(project_path)
                self.escalate(actual_task_id, reason)
                stats["escalated"] += 1
                self.log_result(
                    actual_task_id,
                    {
                        "task_id": actual_task_id,
                        "action": "reverted",
                        "merge_needed": True,
                        "project": project_path,
                        "branch": merge_result.get("branch"),
                        "test_result": "fail",
                        "test_output": test_result["output"][:1000],
                        "duration_seconds": duration,
                        "status": "reverted",
                    },
                )
                # .done is kept — the operator converts it to .done.clear
                # after reviewing the revert.
                continue

            # Success: record and count the merge.
            stats["merged"] += 1
            self.log_result(
                actual_task_id,
                {
                    "task_id": actual_task_id,
                    "action": "auto_merged",
                    "merge_needed": True,
                    "project": project_path,
                    "branch": merge_result.get("branch"),
                    "test_result": "pass",
                    "duration_seconds": duration,
                    "status": "success",
                },
            )
            logger.info(f"완료: {actual_task_id} 머지 성공 ({duration:.1f}s)")

            # Schedule the completion notification to Anu.
            if not self.dry_run:
                self.notify_anu(actual_task_id)

            # .done is kept — the operator converts it to .done.clear.

        logger.info(f"처리 완료: {stats}")
        return stats

    def _finalize_done_file(self, done_file: Path, task_id: str, action: str, reason: str) -> None:
        """Record a terminal log entry for a .done file that was not merged.

        The .done file itself is intentionally left in place — the operator
        converts it to .done.clear after reviewing the report.
        """
        entry = {
            "task_id": task_id,
            "action": action,
            "status": "skipped",
            "reason": reason,
            "done_file": str(done_file),
        }
        self.log_result(task_id, entry)


# ---------------------------------------------------------------------------
# BatchWatchdog (L1)
# ---------------------------------------------------------------------------


class BatchWatchdog:
    """L1: collect .done files and detect whole-team completion per batch_id."""

    def __init__(self, workspace_path: str | None = None) -> None:
        self.workspace = Path(
            workspace_path or os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent))
        )
        # Per-task completion markers live here as {task_id}.done JSON files.
        self.events_dir = self.workspace / "memory" / "events"
        # Shared task status/timer store, also written by other tooling.
        self.timer_file = self.workspace / "memory" / "task-timers.json"

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------

    def _load_env_keys(self) -> dict[str, str]:
        """Parse .env.keys into a dict.

        Accepts ``KEY=VALUE`` lines, tolerates an optional ``export `` prefix,
        skips blanks and ``#`` comments.  Returns an empty dict when the file
        is missing.
        """
        env_file = self.workspace / ".env.keys"
        env_data: dict[str, str] = {}
        if not env_file.exists():
            return env_data
        for line in env_file.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            if line.startswith("export "):
                line = line[len("export ") :]
            if "=" in line:
                k, _, v = line.partition("=")
                env_data[k.strip()] = v.strip()
        return env_data

    @staticmethod
    def _parse_start_time(start_raw: str | None) -> datetime | None:
        """Parse an ISO-format start_time; naive values are assumed UTC.

        Returns None for missing or unparsable values.
        """
        if not start_raw:
            return None
        try:
            start_dt = datetime.fromisoformat(start_raw)
        except (ValueError, TypeError):
            return None
        if start_dt.tzinfo is None:
            start_dt = start_dt.replace(tzinfo=timezone.utc)
        return start_dt

    # ------------------------------------------------------------------
    # scan_done_files
    # ------------------------------------------------------------------

    def scan_done_files(self) -> list[dict]:
        """Collect memory/events/*.done files and return parsed entries with a batch_id.

        Files that already have a .done.clear or .done.merging sibling are
        skipped (same pattern as AutoMerger.scan_done_files).
        """
        if not self.events_dir.exists():
            logger.debug(f"[BatchWatchdog] events_dir 없음: {self.events_dir}")
            return []

        results: list[dict] = []
        for done_file in sorted(self.events_dir.glob("*.done")):
            clear_file = done_file.with_suffix(".done.clear")
            merging_file = done_file.with_suffix(".done.merging")
            if clear_file.exists() or merging_file.exists():
                continue

            try:
                raw = json.loads(done_file.read_text(encoding="utf-8"))
            except (json.JSONDecodeError, OSError):
                # Unreadable or partially-written marker; leave for a later scan.
                continue

            # Guard against valid-but-non-object JSON (e.g. a bare list).
            if not isinstance(raw, dict):
                continue

            batch_id = raw.get("batch_id")
            if not batch_id:
                continue

            # Fall back to the file stem when the payload omits task_id.
            raw.setdefault("task_id", done_file.stem)
            results.append(raw)

        logger.info(f"[BatchWatchdog] batch_id 포함 .done 파일 {len(results)}개 발견")
        return results

    # ------------------------------------------------------------------
    # check_batch_completion
    # ------------------------------------------------------------------

    def check_batch_completion(self, batch_id: str) -> dict:
        """Check completion for *batch_id* by reading task-timers.json directly.

        Returns:
            {"complete": bool, "total": int, "done": int, "pending": list[str], "batch_id": str}
        """
        if not self.timer_file.exists():
            return {"complete": False, "total": 0, "done": 0, "pending": [], "batch_id": batch_id}

        try:
            data = json.loads(self.timer_file.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError) as exc:
            logger.warning(f"[BatchWatchdog] task-timers.json 읽기 실패: {exc}")
            return {"complete": False, "total": 0, "done": 0, "pending": [], "batch_id": batch_id}

        tasks = data.get("tasks", {})
        matched = {tid: info for tid, info in tasks.items() if info.get("batch_id") == batch_id}

        total = len(matched)
        if total == 0:
            return {"complete": False, "total": 0, "done": 0, "pending": [], "batch_id": batch_id}

        done_count = 0
        pending: list[str] = []
        for tid, info in matched.items():
            if info.get("status", "") in ("done", "completed"):
                done_count += 1
            else:
                pending.append(tid)

        return {
            "complete": done_count == total,
            "total": total,
            "done": done_count,
            "pending": pending,
            "batch_id": batch_id,
        }

    # ------------------------------------------------------------------
    # check_ttl
    # ------------------------------------------------------------------

    def check_ttl(self, batch_id: str, ttl_hours: float = 2.0) -> list[dict]:
        """Detect tasks in *batch_id* whose runtime exceeds *ttl_hours*.

        Only tasks that are not done/completed and carry a parsable
        start_time are considered.

        Returns:
            [{"task_id": str, "elapsed_hours": float}]
        """
        if not self.timer_file.exists():
            return []

        try:
            data = json.loads(self.timer_file.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError) as exc:
            logger.warning(f"[BatchWatchdog] TTL 체크 중 timer_file 읽기 실패: {exc}")
            return []

        now = datetime.now(timezone.utc)
        stale: list[dict] = []

        for tid, info in data.get("tasks", {}).items():
            if info.get("batch_id") != batch_id:
                continue
            if info.get("status", "") in ("done", "completed"):
                continue

            start_dt = self._parse_start_time(info.get("start_time"))
            if start_dt is None:
                continue

            elapsed_hours = (now - start_dt).total_seconds() / 3600.0
            if elapsed_hours >= ttl_hours:
                stale.append({"task_id": tid, "elapsed_hours": round(elapsed_hours, 2)})

        return stale

    # ------------------------------------------------------------------
    # check_batch_ttl
    # ------------------------------------------------------------------

    def check_batch_ttl(self, batch_ttl_hours: float = 24.0) -> list[dict]:
        """Detect whole-batch expiry based on batch creation time.

        The oldest start_time among a batch's tasks is taken as the batch
        creation time.  Once *batch_ttl_hours* has elapsed, every task that
        is not done/completed/expired is marked "expired".  The timer store
        is then rewritten ONCE for all batches (previously it was rewritten
        per batch), and atomically: temp file + os.replace, since a plain
        write_text is not atomic and a crash mid-write would corrupt it.

        Returns:
            [{"batch_id": str, "expired_tasks": list[str], "elapsed_hours": float}]
        """
        if not self.timer_file.exists():
            return []

        try:
            data = json.loads(self.timer_file.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError) as exc:
            logger.warning(f"[BatchWatchdog] check_batch_ttl: timer_file 읽기 실패: {exc}")
            return []

        now = datetime.now(timezone.utc)
        tasks = data.get("tasks", {})

        # Group tasks by batch_id.
        batch_map: dict[str, list[tuple[str, dict]]] = {}
        for tid, info in tasks.items():
            bid = info.get("batch_id")
            if bid:
                batch_map.setdefault(bid, []).append((tid, info))

        expired_results: list[dict] = []
        any_changed = False

        for batch_id, task_list in batch_map.items():
            # Oldest start_time ~= batch creation time.
            oldest_dt: datetime | None = None
            for _, info in task_list:
                start_dt = self._parse_start_time(info.get("start_time"))
                if start_dt is not None and (oldest_dt is None or start_dt < oldest_dt):
                    oldest_dt = start_dt

            if oldest_dt is None:
                continue

            elapsed_hours = (now - oldest_dt).total_seconds() / 3600.0
            if elapsed_hours < batch_ttl_hours:
                continue

            # TTL exceeded: mark every unfinished task as "expired".
            expired_tasks: list[str] = []
            for tid, info in task_list:
                if info.get("status", "") not in ("done", "completed", "expired"):
                    info["status"] = "expired"
                    info["expired_at"] = datetime.now(_KST).isoformat()
                    expired_tasks.append(tid)
                    any_changed = True
                    logger.warning(
                        f"[BatchWatchdog] batch TTL 만료: batch_id={batch_id}, "
                        f"task_id={tid}, elapsed={elapsed_hours:.2f}h"
                    )

            if expired_tasks:
                expired_results.append(
                    {
                        "batch_id": batch_id,
                        "expired_tasks": expired_tasks,
                        "elapsed_hours": round(elapsed_hours, 2),
                    }
                )

        if any_changed:
            # Single atomic rewrite covering every expired batch.
            tmp_file = self.timer_file.with_suffix(".json.tmp")
            try:
                tmp_file.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8")
                os.replace(tmp_file, self.timer_file)
                logger.info(
                    f"[BatchWatchdog] task-timers.json 업데이트 완료: "
                    f"expired_batches={[r['batch_id'] for r in expired_results]}"
                )
            except OSError as exc:
                logger.error(f"[BatchWatchdog] task-timers.json 쓰기 실패: {exc}")

        return expired_results

    # ------------------------------------------------------------------
    # cleanup_expired_batches
    # ------------------------------------------------------------------

    def cleanup_expired_batches(self, batch_ttl_hours: float = 24.0) -> dict:
        """Clean up expired batches.

        1. Detect expired batches via check_batch_ttl().
        2. Drop a .done.expired marker next to each expired task's .done file.
        3. Warn Anu via send_ttl_warning().

        Returns:
            {"expired_count": int, "expired_batches": list[str]}
        """
        expired_list = self.check_batch_ttl(batch_ttl_hours=batch_ttl_hours)

        if not expired_list:
            logger.debug("[BatchWatchdog] 만료된 batch 없음")
            return {"expired_count": 0, "expired_batches": []}

        expired_batch_ids: list[str] = []

        for item in expired_list:
            batch_id: str = item["batch_id"]
            expired_tasks: list[str] = item["expired_tasks"]
            elapsed_hours: float = item["elapsed_hours"]

            logger.warning(
                f"[BatchWatchdog] batch 만료 정리: batch_id={batch_id}, "
                f"expired_tasks={expired_tasks}, elapsed={elapsed_hours:.2f}h"
            )

            # Create the expiry marker: {task}.done -> {task}.done.expired
            if self.events_dir.exists():
                for tid in expired_tasks:
                    done_file = self.events_dir / f"{tid}.done"
                    if not done_file.exists():
                        continue
                    expired_marker = done_file.with_suffix(".done.expired")
                    if expired_marker.exists():
                        continue
                    try:
                        expired_marker.write_text(
                            json.dumps(
                                {
                                    "batch_id": batch_id,
                                    "task_id": tid,
                                    "elapsed_hours": elapsed_hours,
                                    "expired_at": datetime.now(_KST).isoformat(),
                                    "reason": f"batch TTL {batch_ttl_hours}h 초과",
                                },
                                ensure_ascii=False,
                                indent=2,
                            ),
                            encoding="utf-8",
                        )
                        logger.info(f"[BatchWatchdog] 만료 마커 생성: {expired_marker.name}")
                    except OSError as exc:
                        logger.error(f"[BatchWatchdog] 만료 마커 생성 실패: {expired_marker.name} - {exc}")

            # Reuse the stale-task warning shape for the expiry alert.
            stale_tasks_for_warn = [{"task_id": tid, "elapsed_hours": elapsed_hours} for tid in expired_tasks]
            self.send_ttl_warning(batch_id, stale_tasks_for_warn)
            expired_batch_ids.append(batch_id)

        return {
            "expired_count": len(expired_batch_ids),
            "expired_batches": expired_batch_ids,
        }

    # ------------------------------------------------------------------
    # send_ttl_warning
    # ------------------------------------------------------------------

    def send_ttl_warning(self, batch_id: str, stale_tasks: list[dict]) -> None:
        """Register a Telegram TTL warning for Anu via cokacdir.

        Credentials come from .env.keys (COKACDIR_KEY_ANU, COKACDIR_CHAT_ID);
        the chat id falls back to the config constant when absent.
        """
        env_data = self._load_env_keys()

        chat_id = env_data.get("COKACDIR_CHAT_ID") or _CfgMgr.get_instance().get_constant("chat_id")
        anu_key = env_data.get("COKACDIR_KEY_ANU")

        if not anu_key:
            logger.error("[BatchWatchdog] COKACDIR_KEY_ANU 미설정 - TTL 경고 건너뜀")
            return

        stale_summary = ", ".join(f"{t['task_id']}({t['elapsed_hours']:.1f}h)" for t in stale_tasks)
        prompt = (
            f"[BatchWatchdog] batch_id={batch_id} TTL 초과 경고\n"
            f"정체 task: {stale_summary}\n"
            f"시각: {datetime.now(_KST).strftime('%Y-%m-%dT%H:%M:%S+09:00')}"
        )

        cmd = [
            "cokacdir",
            "--cron",
            prompt,
            "--at",
            "1m",
            "--chat",
            chat_id,
            "--key",
            anu_key,
            "--once",
        ]

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                logger.info(f"[BatchWatchdog] TTL 경고 등록 완료: batch_id={batch_id}")
            else:
                logger.warning(f"[BatchWatchdog] cokacdir TTL 경고 등록 실패: {result.stderr.strip()}")
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"[BatchWatchdog] cokacdir 실행 예외: {exc}")

    # ------------------------------------------------------------------
    # run
    # ------------------------------------------------------------------

    def run(self) -> list[str]:
        """Main loop:
        1. Collect .done files.
        2. Extract the unique batch_ids.
        3. For each batch_id:
           - check_batch_completion
           - TTL check -> warning
           - collect fully-completed batch_ids (handed to L2)

        Returns:
            List of completed batch_ids.
        """
        done_items = self.scan_done_files()

        # Unique batch_ids.
        seen_batch_ids: set[str] = set()
        for item in done_items:
            bid = item.get("batch_id", "")
            if bid:
                seen_batch_ids.add(bid)

        completed_batches: list[str] = []

        for batch_id in sorted(seen_batch_ids):
            completion = self.check_batch_completion(batch_id)
            logger.info(
                f"[BatchWatchdog] batch_id={batch_id}: "
                f"{completion['done']}/{completion['total']} 완료, "
                f"pending={completion['pending']}"
            )

            stale_tasks = self.check_ttl(batch_id)
            if stale_tasks:
                logger.warning(f"[BatchWatchdog] TTL 초과: {stale_tasks}")
                self.send_ttl_warning(batch_id, stale_tasks)

            if completion["complete"]:
                logger.info(f"[BatchWatchdog] 전팀 완료 확인: batch_id={batch_id}")
                completed_batches.append(batch_id)

        # Batch-level TTL expiry check and cleanup.
        cleanup_result = self.cleanup_expired_batches()
        if cleanup_result["expired_count"] > 0:
            logger.warning(
                f"[BatchWatchdog] batch TTL 만료 정리 완료: "
                f"expired_count={cleanup_result['expired_count']}, "
                f"batches={cleanup_result['expired_batches']}"
            )

        return completed_batches


# ---------------------------------------------------------------------------
# PreFlightCheck (L2)
# ---------------------------------------------------------------------------


class PreFlightCheck:
    """L2: verify pending merges for conflicts via git merge --no-commit simulation."""

    def __init__(self, workspace_path: str | None = None) -> None:
        self.workspace = Path(
            workspace_path or os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent))
        )
        self.events_dir = self.workspace / "memory" / "events"
        self.reports_dir = self.workspace / "memory" / "reports"

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------

    def _load_env_keys(self) -> dict[str, str]:
        """Parse .env.keys (``KEY=VALUE`` lines, optional ``export `` prefix) into a dict."""
        env_file = self.workspace / ".env.keys"
        env_data: dict[str, str] = {}
        if not env_file.exists():
            return env_data
        for line in env_file.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            if line.startswith("export "):
                line = line[len("export ") :]
            if "=" in line:
                k, _, v = line.partition("=")
                env_data[k.strip()] = v.strip()
        return env_data

    # ------------------------------------------------------------------
    # find_merge_branches
    # ------------------------------------------------------------------

    def find_merge_branches(self, batch_id: str) -> list[dict]:
        """Collect merge-branch info for every task belonging to *batch_id*.

        1. memory/events/{task_id}.done -> merge_branch from the JSON payload
        2. memory/reports/{task_id}.md  -> merge_branch via report_parser (fallback)

        Returns:
            [{"task_id": str, "branch": str, "project_path": str}]
        """
        branches: list[dict] = []

        if not self.events_dir.exists():
            return branches

        for done_file in sorted(self.events_dir.glob("*.done")):
            try:
                raw = json.loads(done_file.read_text(encoding="utf-8"))
            except (json.JSONDecodeError, OSError):
                continue

            # Guard against valid-but-non-object JSON payloads.
            if not isinstance(raw, dict) or raw.get("batch_id") != batch_id:
                continue

            task_id = raw.get("task_id", done_file.stem)

            # The .done JSON takes priority for merge_branch.
            merge_branch = raw.get("merge_branch")
            project_path = raw.get("project_path", "")

            # Otherwise fall back to the task's report file.
            if not merge_branch:
                report_file = self.reports_dir / f"{task_id}.md"
                if not report_file.exists():
                    # Tolerate suffixed report names, e.g. {task_id}-v2.md.
                    candidates = list(self.reports_dir.glob(f"{task_id}*.md"))
                    if candidates:
                        report_file = candidates[0]
                if report_file.exists():
                    try:
                        report_data = parse_report(str(report_file))
                        merge_branch = report_data.get("merge_branch")
                        # Recover project_path from the worktree or file paths.
                        if not project_path:
                            worktree = report_data.get("merge_worktree") or ""
                            m = re.match(r"(/home/jay/projects/[^/]+)", worktree)
                            if m:
                                project_path = m.group(1)
                            if not project_path:
                                for f_path in report_data.get("files", []):
                                    m2 = re.match(r"(/home/jay/projects/[^/]+)", f_path)
                                    if m2:
                                        project_path = m2.group(1)
                                        break
                    except Exception as exc:
                        logger.debug(f"[PreFlightCheck] 보고서 파싱 실패 {task_id}: {exc}")

            if merge_branch:
                branches.append(
                    {
                        "task_id": task_id,
                        "branch": merge_branch,
                        "project_path": project_path or "",
                    }
                )

        logger.info(f"[PreFlightCheck] batch_id={batch_id}: {len(branches)}개 branch 수집")
        return branches

    # ------------------------------------------------------------------
    # simulate_merge
    # ------------------------------------------------------------------

    def simulate_merge(self, project_path: str, branches: list[str]) -> dict:
        """Simulate merges with git merge --no-commit.

        1. git stash (protect local changes) — remembering whether a stash
           entry was actually created.
        2. For each branch: git merge --no-commit --no-ff.
        3. On conflict: collect conflicted files, then git merge --abort.
        4. git stash pop ONLY if step 1 stashed something; popping on a
           clean tree would pop an unrelated, pre-existing stash entry.

        Returns:
            {"passed": bool, "conflicts": [{"branch": str, "files": list[str]}]}
        """
        conflicts: list[dict] = []
        project = Path(project_path)

        if not project.exists():
            logger.warning(f"[PreFlightCheck] 프로젝트 경로 없음: {project_path}")
            return {"passed": False, "conflicts": [{"branch": "__project_not_found__", "files": []}]}

        # git stash — track whether an entry was really created.  git prints
        # "No local changes to save" (and exits 0) when there is nothing to stash.
        stashed = False
        try:
            stash_result = subprocess.run(
                ["git", "stash", "--include-untracked"],
                cwd=project_path,
                capture_output=True,
                text=True,
                timeout=30,
            )
            stashed = stash_result.returncode == 0 and "No local changes" not in stash_result.stdout
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.warning(f"[PreFlightCheck] git stash 실패: {exc}")

        try:
            for branch in branches:
                try:
                    result = subprocess.run(
                        ["git", "merge", "--no-commit", "--no-ff", branch],
                        cwd=project_path,
                        capture_output=True,
                        text=True,
                        timeout=60,
                    )

                    if result.returncode != 0:
                        # Collect the unmerged (conflicted) paths.
                        conflict_files_result = subprocess.run(
                            ["git", "diff", "--name-only", "--diff-filter=U"],
                            cwd=project_path,
                            capture_output=True,
                            text=True,
                            timeout=15,
                        )
                        conflict_files = [f.strip() for f in conflict_files_result.stdout.splitlines() if f.strip()]
                        conflicts.append({"branch": branch, "files": conflict_files})
                        logger.warning(f"[PreFlightCheck] 충돌 감지: branch={branch}, files={conflict_files}")

                        # Undo the failed merge.
                        subprocess.run(
                            ["git", "merge", "--abort"],
                            cwd=project_path,
                            capture_output=True,
                            text=True,
                            timeout=15,
                        )
                    else:
                        # Clean no-commit merge: undo it.  Fall back to a hard
                        # reset ONLY when the abort fails — running reset
                        # unconditionally would discard unrelated working-tree
                        # state.
                        abort_result = subprocess.run(
                            ["git", "merge", "--abort"],
                            cwd=project_path,
                            capture_output=True,
                            text=True,
                            timeout=15,
                        )
                        if abort_result.returncode != 0:
                            subprocess.run(
                                ["git", "reset", "--hard", "HEAD"],
                                cwd=project_path,
                                capture_output=True,
                                text=True,
                                timeout=15,
                            )

                except (subprocess.TimeoutExpired, OSError) as exc:
                    logger.error(f"[PreFlightCheck] merge 시뮬레이션 예외: {exc}")
                    conflicts.append({"branch": branch, "files": [f"__exception__: {exc}"]})

        finally:
            # Restore local changes only if we actually stashed them above.
            if stashed:
                try:
                    subprocess.run(
                        ["git", "stash", "pop"],
                        cwd=project_path,
                        capture_output=True,
                        text=True,
                        timeout=30,
                    )
                except (subprocess.TimeoutExpired, OSError) as exc:
                    logger.warning(f"[PreFlightCheck] git stash pop 실패: {exc}")

        passed = len(conflicts) == 0
        return {"passed": passed, "conflicts": conflicts}

    # ------------------------------------------------------------------
    # send_conflict_alert
    # ------------------------------------------------------------------

    def send_conflict_alert(self, batch_id: str, conflicts: list[dict]) -> None:
        """Notify Anu of merge conflicts via a cokacdir Telegram message."""
        env_data = self._load_env_keys()

        chat_id = env_data.get("COKACDIR_CHAT_ID") or _CfgMgr.get_instance().get_constant("chat_id")
        anu_key = env_data.get("COKACDIR_KEY_ANU")

        if not anu_key:
            logger.error("[PreFlightCheck] COKACDIR_KEY_ANU 미설정 - 충돌 알림 건너뜀")
            return

        # Cap the summary at 5 branches / 3 files each to keep the message short.
        conflict_summary = "; ".join(f"{c['branch']}: {', '.join(c['files'][:3]) or 'unknown'}" for c in conflicts[:5])
        prompt = (
            f"[PreFlightCheck] batch_id={batch_id} 머지 충돌 감지\n"
            f"충돌: {conflict_summary}\n"
            f"수동 검토 필요\n"
            f"시각: {datetime.now(_KST).strftime('%Y-%m-%dT%H:%M:%S+09:00')}"
        )

        cmd = [
            "cokacdir",
            "--cron",
            prompt,
            "--at",
            "1m",
            "--chat",
            chat_id,
            "--key",
            anu_key,
            "--once",
        ]

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                logger.info(f"[PreFlightCheck] 충돌 알림 등록 완료: batch_id={batch_id}")
            else:
                logger.warning(f"[PreFlightCheck] 충돌 알림 등록 실패: {result.stderr.strip()}")
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"[PreFlightCheck] cokacdir 실행 예외: {exc}")

    # ------------------------------------------------------------------
    # run
    # ------------------------------------------------------------------

    def run(self, batch_id: str) -> dict:
        """L2 entry point:
        1. find_merge_branches
        2. simulate_merge (grouped per project)
        3. On conflict: send_conflict_alert -> {"passed": False, ...}
        4. On PASS: {"passed": True, ...}
        """
        branch_infos = self.find_merge_branches(batch_id)

        if not branch_infos:
            logger.warning(f"[PreFlightCheck] batch_id={batch_id}: merge branch 없음")
            return {"passed": True, "conflicts": [], "branch_count": 0, "batch_id": batch_id}

        # Group branches by project path.
        project_map: dict[str, list[str]] = {}
        for info in branch_infos:
            pp = info.get("project_path") or ""
            project_map.setdefault(pp, []).append(info["branch"])

        all_conflicts: list[dict] = []
        for project_path, branches in project_map.items():
            if not project_path:
                logger.warning(f"[PreFlightCheck] 프로젝트 경로 없음 (branches={branches})")
                continue
            sim_result = self.simulate_merge(project_path, branches)
            if not sim_result["passed"]:
                all_conflicts.extend(sim_result["conflicts"])

        passed = len(all_conflicts) == 0

        if not passed:
            self.send_conflict_alert(batch_id, all_conflicts)

        return {
            "passed": passed,
            "conflicts": all_conflicts,
            "branch_count": len(branch_infos),
            "batch_id": batch_id,
        }


# ---------------------------------------------------------------------------
# IntegrationTestRunner (L3)
# ---------------------------------------------------------------------------


class IntegrationTestRunner:
    """L3: run pytest against tests/integration/ and report the outcome."""

    def __init__(self, workspace_path: str | None = None) -> None:
        self.workspace = Path(
            workspace_path or os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent))
        )
        self.reports_dir = self.workspace / "memory" / "reports"

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------

    def _load_env_keys(self) -> dict[str, str]:
        """Parse .env.keys (``KEY=VALUE`` lines, optional ``export `` prefix) into a dict."""
        env_file = self.workspace / ".env.keys"
        env_data: dict[str, str] = {}
        if not env_file.exists():
            return env_data
        for line in env_file.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            if line.startswith("export "):
                line = line[len("export ") :]
            if "=" in line:
                k, _, v = line.partition("=")
                env_data[k.strip()] = v.strip()
        return env_data

    # ------------------------------------------------------------------
    # run_tests
    # ------------------------------------------------------------------

    def run_tests(self, test_dir: str = "tests/integration/") -> dict:
        """Run pytest on *test_dir* (resolved against the workspace if relative).

        A missing test directory counts as PASS with zero tests.  Any
        non-zero pytest exit code is treated as FAIL — NOTE(review): this
        includes pytest's "no tests collected" exit code; confirm that is
        intended.

        Returns:
            {"passed": bool, "output": str, "duration": float, "test_count": int}
        """
        test_path = Path(test_dir)
        if not test_path.is_absolute():
            test_path = self.workspace / test_dir

        if not test_path.exists():
            logger.warning(f"[IntegrationTestRunner] 테스트 디렉터리 없음: {test_path}")
            return {
                "passed": True,
                "output": f"테스트 디렉터리 없음 (건너뜀): {test_path}",
                "duration": 0.0,
                "test_count": 0,
            }

        start_time = time.monotonic()
        try:
            result = subprocess.run(
                ["pytest", str(test_path), "--tb=short", "-q", "--no-header"],
                cwd=str(self.workspace),
                capture_output=True,
                text=True,
                timeout=600,
            )
            duration = time.monotonic() - start_time
            output = result.stdout + result.stderr
            passed = result.returncode == 0

            # Parse the test count from summaries like "5 passed, 1 failed"
            # or "6 passed".
            test_count = 0
            m = re.search(r"(\d+)\s+passed", output)
            if m:
                test_count = int(m.group(1))
            fail_m = re.search(r"(\d+)\s+failed", output)
            if fail_m:
                test_count += int(fail_m.group(1))

            logger.info(
                f"[IntegrationTestRunner] 결과: {'PASS' if passed else 'FAIL'} "
                f"({duration:.1f}s, {test_count} tests)"
            )
            return {"passed": passed, "output": output, "duration": duration, "test_count": test_count}

        except subprocess.TimeoutExpired:
            duration = time.monotonic() - start_time
            msg = f"통합 테스트 timeout (600초 초과): {test_path}"
            logger.error(f"[IntegrationTestRunner] {msg}")
            return {"passed": False, "output": msg, "duration": duration, "test_count": 0}
        except OSError as exc:
            # OSError already covers FileNotFoundError (pytest binary missing).
            duration = time.monotonic() - start_time
            msg = f"pytest 실행 실패: {exc}"
            logger.error(f"[IntegrationTestRunner] {msg}")
            return {"passed": False, "output": msg, "duration": duration, "test_count": 0}

    # ------------------------------------------------------------------
    # generate_report
    # ------------------------------------------------------------------

    def generate_report(self, batch_id: str, test_result: dict) -> Path:
        """Write the result report to memory/reports/batch-{batch_id}-integration.md.

        Returns the path of the written report file.
        """
        self.reports_dir.mkdir(parents=True, exist_ok=True)
        report_file = self.reports_dir / f"batch-{batch_id}-integration.md"

        status = "PASS" if test_result["passed"] else "FAIL"
        now_str = datetime.now(_KST).strftime("%Y-%m-%dT%H:%M:%S+09:00")

        # Cap captured output at 3000 chars to keep the report readable.
        content = (
            f"# Integration Test Report - batch-{batch_id}\n\n"
            f"- **batch_id**: {batch_id}\n"
            f"- **status**: {status}\n"
            f"- **test_count**: {test_result.get('test_count', 0)}\n"
            f"- **duration**: {test_result.get('duration', 0.0):.1f}s\n"
            f"- **generated_at**: {now_str}\n\n"
            f"## Output\n\n"
            f"```\n{test_result.get('output', '')[:3000]}\n```\n"
        )

        report_file.write_text(content, encoding="utf-8")
        logger.info(f"[IntegrationTestRunner] 리포트 생성: {report_file}")
        return report_file

    # ------------------------------------------------------------------
    # send_result
    # ------------------------------------------------------------------

    def send_result(self, batch_id: str, test_result: dict) -> None:
        """Send the test result to Anu via a cokacdir Telegram message.

        PASS: auto-completion report.
        FAIL: failure log tail -> Anu decides.
        """
        env_data = self._load_env_keys()

        chat_id = env_data.get("COKACDIR_CHAT_ID") or _CfgMgr.get_instance().get_constant("chat_id")
        anu_key = env_data.get("COKACDIR_KEY_ANU")

        if not anu_key:
            logger.error("[IntegrationTestRunner] COKACDIR_KEY_ANU 미설정 - 결과 전송 건너뜀")
            return

        status = "PASS" if test_result["passed"] else "FAIL"
        test_count = test_result.get("test_count", 0)
        duration = test_result.get("duration", 0.0)

        if test_result["passed"]:
            prompt = (
                f"[IntegrationTestRunner] batch_id={batch_id} 통합 테스트 완료\n"
                f"결과: {status} ({test_count}개 테스트, {duration:.1f}s)\n"
                f"자동 완료 처리 확인 요청\n"
                f"시각: {datetime.now(_KST).strftime('%Y-%m-%dT%H:%M:%S+09:00')}"
            )
        else:
            # Only the tail of the output fits in a Telegram message.
            output_tail = test_result.get("output", "")[-500:]
            prompt = (
                f"[IntegrationTestRunner] batch_id={batch_id} 통합 테스트 실패\n"
                f"결과: {status} ({test_count}개 테스트, {duration:.1f}s)\n"
                f"로그(말미):\n{output_tail}\n"
                f"수동 판단 필요\n"
                f"시각: {datetime.now(_KST).strftime('%Y-%m-%dT%H:%M:%S+09:00')}"
            )

        cmd = [
            "cokacdir",
            "--cron",
            prompt,
            "--at",
            "1m",
            "--chat",
            chat_id,
            "--key",
            anu_key,
            "--once",
        ]

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                logger.info(f"[IntegrationTestRunner] 결과 전송 등록 완료: batch_id={batch_id} [{status}]")
            else:
                logger.warning(f"[IntegrationTestRunner] 결과 전송 등록 실패: {result.stderr.strip()}")
        except (subprocess.TimeoutExpired, OSError) as exc:
            logger.error(f"[IntegrationTestRunner] cokacdir 실행 예외: {exc}")

    # ------------------------------------------------------------------
    # run
    # ------------------------------------------------------------------

    def run(self, batch_id: str) -> dict:
        """L3 entry point: run_tests -> generate_report -> send_result."""
        test_result = self.run_tests()
        report_path = self.generate_report(batch_id, test_result)
        self.send_result(batch_id, test_result)
        return {
            "batch_id": batch_id,
            "passed": test_result["passed"],
            "test_count": test_result.get("test_count", 0),
            "duration": test_result.get("duration", 0.0),
            "report": str(report_path),
        }


# ---------------------------------------------------------------------------
# GraduatedAutoGate (오케스트레이터)
# ---------------------------------------------------------------------------


class GraduatedAutoGate:
    """Orchestrator for the 3-Layer Auto-Gate (L1 → L2 → L3)."""

    def __init__(self, workspace_path: str | None = None, dry_run: bool = False) -> None:
        # Fall back to WORKSPACE_ROOT, then to the repository root relative to this file.
        default_root = os.environ.get("WORKSPACE_ROOT", str(Path(__file__).resolve().parent.parent))
        self.workspace = Path(workspace_path or default_root)
        self.dry_run = dry_run
        ws = str(self.workspace)
        self.watchdog = BatchWatchdog(ws)
        self.preflight = PreFlightCheck(ws)
        self.test_runner = IntegrationTestRunner(ws)

    def run(self) -> dict:
        """Run L1 → L2 → L3 in sequence.

        1. L1: batch watchdog → list of completed batch ids
        2. For each batch id:
           a. L2: pre-flight check → on FAIL, alert + skip
           b. L3: integration test → auto-report on PASS, log on FAIL

        Returns:
            {"batches_checked": int, "passed": int, "failed": int, "details": list}
        """
        logger.info("[GraduatedAutoGate] 3-Layer Auto-Gate 시작")

        # L1: collect batch ids whose processing is complete.
        batch_ids = self.watchdog.run()
        logger.info(f"[GraduatedAutoGate] L1 완료: {len(batch_ids)}개 batch 처리 대상")

        summary: dict[str, Any] = {
            "batches_checked": len(batch_ids),
            "passed": 0,
            "failed": 0,
            "details": [],
        }

        for bid in batch_ids:
            entry: dict[str, Any] = {"batch_id": bid, "l2": None, "l3": None}

            # Dry-run: record the batch as handled without touching L2/L3.
            if self.dry_run:
                logger.info(f"[GraduatedAutoGate][DRY-RUN] batch_id={bid}: L2/L3 건너뜀")
                entry["dry_run"] = True
                summary["details"].append(entry)
                summary["passed"] += 1
                continue

            # L2: pre-flight conflict check; a failure skips the batch entirely.
            logger.info(f"[GraduatedAutoGate] L2 Pre-Flight Check: batch_id={bid}")
            preflight_result = self.preflight.run(bid)
            entry["l2"] = preflight_result

            if not preflight_result["passed"]:
                n_conflicts = len(preflight_result["conflicts"])
                logger.warning(f"[GraduatedAutoGate] L2 FAIL batch_id={bid}: 충돌 {n_conflicts}건 → skip")
                summary["failed"] += 1
                summary["details"].append(entry)
                continue

            # L3: integration test run for this batch.
            logger.info(f"[GraduatedAutoGate] L3 Integration Test: batch_id={bid}")
            test_outcome = self.test_runner.run(bid)
            entry["l3"] = test_outcome

            if test_outcome["passed"]:
                logger.info(f"[GraduatedAutoGate] L3 PASS batch_id={bid}: 자동 완료 보고")
                summary["passed"] += 1
            else:
                logger.warning(f"[GraduatedAutoGate] L3 FAIL batch_id={bid}: 테스트 실패 → 아누 판단 대기")
                summary["failed"] += 1

            summary["details"].append(entry)

        logger.info(
            f"[GraduatedAutoGate] 완료: checked={summary['batches_checked']}, "
            f"passed={summary['passed']}, failed={summary['failed']}"
        )
        return summary


# ---------------------------------------------------------------------------
# CLI 진입점
# ---------------------------------------------------------------------------


def build_parser() -> argparse.ArgumentParser:
    """Construct and return the CLI argument parser."""
    p = argparse.ArgumentParser(
        prog="auto_merge.py",
        description="자동 머지 처리 스크립트",
    )
    # Optional task selectors and execution-mode flags.
    p.add_argument("--task-id", help="특정 task만 처리 (예: task-391.1)")
    p.add_argument("--dry-run", action="store_true", help="드라이런: 실제 동작 없이 분석만")
    p.add_argument("--force-merge", metavar="TASK_ID", help="강제 머지 (이미 처리됐어도)")
    p.add_argument(
        "--graduated",
        action="store_true",
        help="3-Layer Graduated Auto-Gate 실행 (L1 BatchWatchdog → L2 PreFlightCheck → L3 IntegrationTestRunner)",
    )
    return p


def main() -> None:
    """CLI entry point: parse arguments, dispatch, print the JSON result."""
    args = build_parser().parse_args()

    # --graduated selects the 3-layer gate; otherwise run the classic merger.
    if args.graduated:
        runner = GraduatedAutoGate(dry_run=args.dry_run)
    else:
        runner = AutoMerger(
            dry_run=args.dry_run,
            target_task_id=args.task_id,
            force_task_id=args.force_merge,
        )
    result = runner.run()

    print(json.dumps(result, ensure_ascii=False, indent=2))


# Run the CLI only when executed directly as a script, not on import.
if __name__ == "__main__":
    main()
