#!/usr/bin/env python3
"""
project-map.py - 프로젝트 구조 요약 Markdown 파일 생성 CLI 스크립트

Usage:
    python3 scripts/project-map.py <PROJECT_PATH> --output <OUTPUT_PATH> [--depth 2] [--include-tests]

Incremental:
    python3 scripts/project-map.py <PROJECT_PATH> --output <OUTPUT_PATH> \
        --incremental --changed-files "file1.ts,file2.tsx" --deleted-files "old.ts"

Rollback:
    python3 scripts/project-map.py <PROJECT_PATH> --output <OUTPUT_PATH> --rollback
"""

import argparse
import fcntl
import hashlib
import json
import os
import re
import subprocess
import sys
import tempfile
import time
from datetime import datetime
from pathlib import Path

# Directories to skip entirely during any scan (build artifacts, VCS, caches).
EXCLUDE_DIRS = {
    "node_modules", ".next", ".git", "__pycache__", ".cache",
    "dist", "build", ".turbo", "coverage", ".vercel", ".idea",
    ".vscode", ".yarn", ".npm", "out", ".svelte-kit",
}

# File-name regexes (matched with re.search) excluded from all scans.
EXCLUDE_FILE_PATTERNS = [
    r"\.pyc$", r"\.DS_Store$", r"Thumbs\.db$", r"\.env$",
    r"\.env\.local$", r"\.env\.\w+\.local$",
]

# Regexes identifying test files/directories (skipped unless --include-tests).
TEST_PATTERNS = [
    r"__tests__", r"\.test\.", r"\.spec\.",
]

# HTTP methods recognized when extracting API route handlers.
HTTP_METHODS = ["GET", "POST", "PUT", "DELETE", "PATCH"]

# Sensitive-file regexes (security): matched case-insensitively against
# project-relative paths; matching files are refused by _validate_path.
SENSITIVE_PATTERNS = [
    r"(^|/)\.env(\.|$)",
    r"\.pem$",
    r"credentials",
    r"\.key$",
    r"\.secret$",
    r"id_rsa",
    r"id_ed25519",
]


def parse_args():
    """Parse and return the CLI arguments for the project-map script."""
    p = argparse.ArgumentParser(description="프로젝트 구조 요약 Markdown 파일 생성")
    p.add_argument("project_path", help="프로젝트 루트 경로 (필수)")
    p.add_argument("--output", required=True, help="출력 파일 경로 (필수)")
    p.add_argument("--depth", type=int, default=2,
                   help="디렉토리 트리 깊이 (기본값: 2)")
    p.add_argument("--include-tests", action="store_true", default=False,
                   help="테스트 파일도 포함 (기본값: False)")
    # Incremental-update mode.
    p.add_argument("--incremental", action="store_true", default=False,
                   help="incremental 업데이트 모드 활성화")
    p.add_argument("--changed-files", default="",
                   help="변경/생성된 파일 목록 (쉼표 구분, 프로젝트 상대 경로)")
    p.add_argument("--deleted-files", default="",
                   help="삭제된 파일 목록 (쉼표 구분, 프로젝트 상대 경로)")
    # Rollback mode.
    p.add_argument("--rollback", action="store_true", default=False,
                   help=".bak 파일에서 Markdown 복원")
    # Optional Drive change log (JSONL).
    p.add_argument("--drive-log", default=None,
                   help="Drive 변경 로그 파일 경로 (JSONL)")
    return p.parse_args()


def is_excluded_dir(name: str) -> bool:
    """Return True for directories that should be skipped entirely.

    A directory is excluded when it is hidden (dot-prefixed) or listed
    in EXCLUDE_DIRS.
    """
    if name.startswith("."):
        return True
    return name in EXCLUDE_DIRS


def is_excluded_file(name: str) -> bool:
    """Return True when the file name matches any exclusion regex."""
    return any(
        re.search(pattern, name) is not None
        for pattern in EXCLUDE_FILE_PATTERNS
    )


def is_sensitive_file(rel_path: str) -> bool:
    """Return True when the relative path matches a sensitive-file pattern.

    Matching is case-insensitive.
    """
    return any(
        re.search(pattern, rel_path, re.IGNORECASE) is not None
        for pattern in SENSITIVE_PATTERNS
    )


def is_test_path(path_str: str) -> bool:
    """Return True when the path looks like a test file or test directory."""
    return any(
        re.search(pattern, path_str) is not None
        for pattern in TEST_PATTERNS
    )


def validate_path(path: str, base_dir: str) -> str:
    """Normalize *path* against *base_dir* and reject path traversal.

    Absolute inputs pass through os.path.join unchanged; relative ones
    are joined onto the resolved base.

    Returns:
        The fully resolved absolute path.

    Raises:
        ValueError: When the resolved path escapes *base_dir*.
    """
    resolved_base = os.path.realpath(base_dir)
    resolved = os.path.realpath(os.path.join(resolved_base, path))
    inside = resolved == resolved_base or resolved.startswith(resolved_base + os.sep)
    if not inside:
        raise ValueError(f"경로 검증 실패 (Path Traversal 의심): {path!r}")
    return resolved


def build_tree(
    root: Path,
    prefix: str = "",
    depth: int = 3,
    current_depth: int = 0,
    project_root: Path = None,
) -> list:
    """Recursively build a textual directory tree.

    Args:
        root: Directory currently being rendered.
        prefix: Connector/indent prefix inherited from the parent level.
        depth: Maximum recursion depth.
        current_depth: Depth of *root* relative to the project root.
        project_root: Top-level project directory (defaults to *root*).

    Returns:
        List of tree lines (excluding the project-root header line).
    """
    if current_depth >= depth:
        return []

    if project_root is None:
        project_root = root

    try:
        entries = sorted(root.iterdir(), key=lambda e: (e.is_file(), e.name.lower()))
    except PermissionError:
        return [f"{prefix}[권한 없음]"]

    # Keep only renderable entries. BUGFIX: entries that are neither a
    # directory nor a regular file (e.g. broken symlinks) used to pass this
    # filter but render nothing, so they inflated the "last entry" index and
    # produced wrong ├──/└── connectors for the real last entry.
    visible_entries = []
    for entry in entries:
        if entry.is_dir():
            if not is_excluded_dir(entry.name):
                visible_entries.append(entry)
        elif entry.is_file():
            if not is_excluded_file(entry.name):
                visible_entries.append(entry)

    lines = []
    for i, entry in enumerate(visible_entries):
        is_last = i == len(visible_entries) - 1
        connector = "└── " if is_last else "├── "
        extension = "    " if is_last else "│   "

        if entry.is_dir():
            lines.append(f"{prefix}{connector}{entry.name}/")
            lines.extend(
                build_tree(
                    entry,
                    prefix=prefix + extension,
                    depth=depth,
                    current_depth=current_depth + 1,
                    project_root=project_root,
                )
            )
        else:
            lines.append(f"{prefix}{connector}{entry.name}")

    return lines


def extract_types_interfaces(project_root: Path, include_tests: bool) -> dict:
    """Collect `export interface` / `export type` names from *.ts files.

    Skips excluded directories/files, .d.ts declarations, and (optionally)
    test files. At most 500 files are read for performance.

    Returns:
        Mapping of project-relative path -> list of exported type names
        (files without any exports are omitted).
    """
    export_pattern = re.compile(
        r"^\s*export\s+(?:interface|type)\s+([A-Za-z_$][A-Za-z0-9_$]*)",
        re.MULTILINE,
    )

    candidates = []
    for dirpath, dirnames, filenames in os.walk(project_root):
        # Prune excluded directories in place so os.walk never descends.
        dirnames[:] = [d for d in dirnames if not is_excluded_dir(d)]

        for fname in filenames:
            if not fname.endswith(".ts") or fname.endswith(".d.ts"):
                continue
            if is_excluded_file(fname):
                continue

            path = Path(dirpath) / fname
            rel = str(path.relative_to(project_root))
            if not include_tests and is_test_path(rel):
                continue
            candidates.append(path)

    # Cap the number of scanned files for performance.
    found = {}
    for path in candidates[:500]:
        try:
            content = path.read_text(encoding="utf-8", errors="replace")
        except (OSError, PermissionError):
            continue
        names = export_pattern.findall(content)
        if names:
            found[str(path.relative_to(project_root))] = names

    return found


def extract_api_routes(project_root: Path, include_tests: bool) -> list:
    """Extract HTTP methods and URL paths from Next.js API route files.

    Scans for route.ts / route.js under app/api/ (App Router) or
    pages/api/ (Pages Router) subtrees and collects the exported HTTP
    method handlers plus the derived URL path.

    Returns:
        List of {"methods": [...], "url": str, "file": str} dicts,
        sorted by URL.
    """
    routes = []

    for dirpath, dirnames, filenames in os.walk(project_root):
        dirnames[:] = [d for d in dirnames if not is_excluded_dir(d)]

        for fname in filenames:
            if fname not in ("route.ts", "route.js"):
                continue

            full_path = Path(dirpath) / fname
            rel_path = str(full_path.relative_to(project_root))

            # Only app/api/ or pages/api/ subtrees qualify. Pad with a
            # leading slash so a root-level "app/api/..." path matches the
            # same "/app/api/" marker as nested ".../app/api/..." ones.
            normalized = rel_path.replace("\\", "/")
            padded = normalized if normalized.startswith("/") else "/" + normalized
            is_app_router = "/app/api/" in padded
            is_pages_router = "/pages/api/" in padded

            if not (is_app_router or is_pages_router):
                continue

            if not include_tests and is_test_path(rel_path):
                continue

            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
            except (OSError, PermissionError):
                continue

            # Exported handlers: export [async] function/const GET ...
            methods = []
            for method in HTTP_METHODS:
                pattern = re.compile(
                    rf"^\s*export\s+(?:async\s+)?(?:function|const)\s+{method}\b",
                    re.MULTILINE,
                )
                if pattern.search(content):
                    methods.append(method)

            if not methods:
                continue

            # Derive the URL by stripping everything up to (and including)
            # the "app"/"pages" segment, so the URL starts with "/api/...".
            # BUGFIX: the previous fallback kept the "app/"/"pages/" prefix
            # for root-level routes ("app/api/users/route.ts" yielded
            # "/app/api/users" instead of "/api/users").
            if is_app_router:
                idx = padded.find("/app/api/")
                url_path = padded[idx + len("/app"):]
            else:
                idx = padded.find("/pages/api/")
                url_path = padded[idx + len("/pages"):]
            url_path = url_path.replace("/route.ts", "").replace("/route.js", "")

            routes.append({
                "methods": methods,
                "url": url_path,
                "file": rel_path,
            })

    # Stable ordering for the generated document.
    routes.sort(key=lambda r: r["url"])
    return routes


def extract_components(project_root: Path, include_tests: bool) -> list:
    """Collect default-exported component names from .tsx files under components/.

    Returns:
        List of {"name": str, "file": str} dicts, sorted by name
        (case-insensitive).
    """
    # export default function Foo / export default class Foo
    default_def = re.compile(
        r"^\s*export\s+default\s+(?:function|class)\s+([A-Za-z_$][A-Za-z0-9_$]*)",
        re.MULTILINE,
    )
    # export default Foo;  (re-exported identifier)
    default_ref = re.compile(
        r"^\s*export\s+default\s+([A-Za-z_$][A-Za-z0-9_$]*)\s*[;,\n]",
        re.MULTILINE,
    )

    found = []
    for dirpath, dirnames, filenames in os.walk(project_root):
        dirnames[:] = [d for d in dirnames if not is_excluded_dir(d)]

        # Only directories with a "components" path segment are considered.
        rel_dir = str(Path(dirpath).relative_to(project_root)).replace("\\", "/")
        if "components" not in rel_dir.split("/"):
            continue

        for fname in filenames:
            if not fname.endswith(".tsx") or is_excluded_file(fname):
                continue

            full_path = Path(dirpath) / fname
            rel_path = str(full_path.relative_to(project_root))
            if not include_tests and is_test_path(rel_path):
                continue

            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
            except (OSError, PermissionError):
                continue

            match = default_def.search(content) or default_ref.search(content)
            name = match.group(1) if match else None
            if not name:
                # Fall back to a PascalCase file name.
                stem = Path(fname).stem
                if stem[0].isupper():
                    name = stem

            if name:
                found.append({"name": name, "file": rel_path})

    found.sort(key=lambda c: c["name"].lower())
    return found


def summarize_package_json(project_root: Path) -> list:
    """Summarize package.json files (name, version, dependency/script keys).

    Looks at the project root and one directory level below; unreadable
    or invalid files are skipped silently.
    """
    summaries = []

    # Root-level plus first-level package.json files.
    candidates = [
        *project_root.glob("package.json"),
        *project_root.glob("*/package.json"),
    ]

    for pkg_path in candidates:
        try:
            data = json.loads(pkg_path.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError, PermissionError):
            continue

        summaries.append({
            "file": str(pkg_path.relative_to(project_root)),
            "name": data.get("name", "(unknown)"),
            "version": data.get("version", "(unknown)"),
            "dependencies": sorted(data.get("dependencies", {}).keys()),
            "devDependencies": sorted(data.get("devDependencies", {}).keys()),
            "scripts": list(data.get("scripts", {}).keys()),
        })

    return summaries


def summarize_tsconfig(project_root: Path) -> list:
    """Summarize tsconfig.json files (selected compilerOptions and paths).

    Looks at the project root and one directory level below. JSONC-style
    comments are stripped before parsing; files that still fail to parse
    are skipped.
    """
    key_fields = (
        "target", "module", "moduleResolution", "lib", "strict",
        "baseUrl", "outDir", "rootDir", "jsx", "esModuleInterop",
        "allowSyntheticDefaultImports", "resolveJsonModule",
        "incremental", "noEmit",
    )

    summaries = []
    candidates = [
        *project_root.glob("tsconfig.json"),
        *project_root.glob("*/tsconfig.json"),
    ]

    for ts_path in candidates:
        try:
            raw = ts_path.read_text(encoding="utf-8")
        except (OSError, PermissionError):
            continue
        # Strip // line comments and /* */ block comments (JSON5 style).
        # NOTE(review): this can also mangle string values containing "//".
        raw = re.sub(r"//.*?$", "", raw, flags=re.MULTILINE)
        raw = re.sub(r"/\*.*?\*/", "", raw, flags=re.DOTALL)
        try:
            data = json.loads(raw)
        except json.JSONDecodeError:
            continue

        compiler = data.get("compilerOptions", {})
        entry = {
            "file": str(ts_path.relative_to(project_root)),
            "compilerOptions": {f: compiler[f] for f in key_fields if f in compiler},
        }
        if "paths" in compiler:
            entry["paths"] = compiler["paths"]
        summaries.append(entry)

    return summaries


def get_recently_modified_files(project_root: Path, top_n: int = 20) -> list:
    """Return the most recently modified files as [{"file", "date"}, ...].

    Prefers `git log --name-only` over the last 200 commits; falls back to
    filesystem mtimes when git is unavailable, times out, or yields nothing.

    Args:
        project_root: Directory to inspect.
        top_n: Maximum number of entries to return.
    """

    # Try git log first: output is one %ad date line per commit, followed by
    # the names of the files touched in that commit.
    try:
        result = subprocess.run(
            [
                "git", "log",
                "--name-only",
                "--pretty=format:%ad",
                "--date=short",
                "-200",
            ],
            cwd=str(project_root),
            capture_output=True,
            text=True,
            timeout=15,
        )
        if result.returncode == 0 and result.stdout.strip():
            seen = {}
            current_date = None
            for line in result.stdout.splitlines():
                line = line.strip()
                if not line:
                    continue
                # Date lines have the form YYYY-MM-DD; everything else is a
                # file path belonging to the most recent date seen.
                if re.match(r"^\d{4}-\d{2}-\d{2}$", line):
                    current_date = line
                else:
                    if line not in seen and current_date:
                        # Skip files under excluded directories or matching
                        # excluded file patterns.
                        skip = False
                        parts = line.replace("\\", "/").split("/")
                        for part in parts:
                            if is_excluded_dir(part):
                                skip = True
                                break
                        if not skip and not is_excluded_file(os.path.basename(line)):
                            seen[line] = current_date
                    if len(seen) >= top_n:
                        break

            files = [{"file": f, "date": d} for f, d in list(seen.items())[:top_n]]
            if files:
                return files
    except (subprocess.TimeoutExpired, FileNotFoundError, OSError):
        pass

    # Fallback: rank files by filesystem mtime, newest first.
    file_mtimes = []
    for dirpath, dirnames, filenames in os.walk(project_root):
        dirnames[:] = [d for d in dirnames if not is_excluded_dir(d)]
        for fname in filenames:
            if is_excluded_file(fname):
                continue
            full_path = Path(dirpath) / fname
            try:
                mtime = full_path.stat().st_mtime
                rel = str(full_path.relative_to(project_root))
                file_mtimes.append((mtime, rel))
            except (OSError, PermissionError):
                continue

    file_mtimes.sort(reverse=True)
    results = []
    for mtime, rel in file_mtimes[:top_n]:
        date_str = datetime.fromtimestamp(mtime).strftime("%Y-%m-%d")
        results.append({"file": rel, "date": date_str})
    return results


def generate_markdown(
    project_root: Path,
    output_path: Path,
    depth: int,
    include_tests: bool,
) -> str:
    """Build the full project-map Markdown document and return it as a string.

    Sections: directory tree, types/interfaces, API routes, components,
    configuration summaries (package.json / tsconfig.json), and recently
    modified files. Progress messages are printed to stderr.

    Args:
        project_root: Project root directory.
        output_path: Destination path (not used in this function — the
            caller performs the actual write).
        depth: Directory-tree rendering depth.
        include_tests: Whether test files are included in extraction.
    """
    project_name = project_root.name
    generated_at = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    lines = []

    # Header: title, generation timestamp, absolute project path.
    lines.append(f"# Project Map: {project_name}")
    lines.append(f"> Generated: {generated_at}")
    lines.append(f"> Path: {project_root}")
    lines.append("")

    # ── a. Directory tree ────────────────────────────────────────────────────
    lines.append(f"## Directory Tree (depth: {depth})")
    lines.append("```")
    lines.append(f"{project_name}/")
    tree_lines = build_tree(project_root, depth=depth, project_root=project_root)
    lines.extend(tree_lines)
    lines.append("```")
    lines.append("")

    # ── b. Exported types / interfaces ───────────────────────────────────────
    lines.append("## Types & Interfaces")
    print("[*] 타입/인터페이스 추출 중...", file=sys.stderr)
    types_map = extract_types_interfaces(project_root, include_tests)

    if types_map:
        for rel_path, names in sorted(types_map.items()):
            names_str = ", ".join(f"`{n}`" for n in names)
            lines.append(f"- `{rel_path}`: {names_str}")
    else:
        lines.append("_(타입/인터페이스 없음)_")
    lines.append("")

    # ── c. API route list ────────────────────────────────────────────────────
    lines.append("## API Routes")
    print("[*] API 라우트 추출 중...", file=sys.stderr)
    routes = extract_api_routes(project_root, include_tests)

    if routes:
        for route in routes:
            methods_str = ", ".join(route["methods"])
            lines.append(f"- `{methods_str} {route['url']}` → `{route['file']}`")
    else:
        lines.append("_(API 라우트 없음)_")
    lines.append("")

    # ── d. Component list ────────────────────────────────────────────────────
    lines.append("## Components")
    print("[*] 컴포넌트 추출 중...", file=sys.stderr)
    components = extract_components(project_root, include_tests)

    if components:
        for comp in components:
            lines.append(f"- `{comp['name']}` → `{comp['file']}`")
    else:
        lines.append("_(컴포넌트 없음)_")
    lines.append("")

    # ── e. Key configuration summaries ───────────────────────────────────────
    lines.append("## Configuration Summary")
    print("[*] 설정 파일 요약 중...", file=sys.stderr)

    # package.json summaries (root + first level).
    pkg_summaries = summarize_package_json(project_root)
    for pkg in pkg_summaries:
        lines.append(f"### {pkg['file']}")
        lines.append(f"- **Name**: {pkg['name']}")
        lines.append(f"- **Version**: {pkg['version']}")
        if pkg["scripts"]:
            lines.append(f"- **Scripts**: {', '.join(pkg['scripts'])}")
        if pkg["dependencies"]:
            lines.append(f"- **Dependencies**: {', '.join(pkg['dependencies'])}")
        else:
            lines.append("- **Dependencies**: (없음)")
        if pkg["devDependencies"]:
            lines.append(f"- **DevDependencies**: {', '.join(pkg['devDependencies'])}")
        else:
            lines.append("- **DevDependencies**: (없음)")
        lines.append("")

    # tsconfig.json summaries (root + first level).
    ts_summaries = summarize_tsconfig(project_root)
    for ts in ts_summaries:
        lines.append(f"### {ts['file']}")
        for key, val in ts["compilerOptions"].items():
            if isinstance(val, list):
                lines.append(f"- **{key}**: {', '.join(str(v) for v in val)}")
            else:
                lines.append(f"- **{key}**: {val}")
        if "paths" in ts:
            lines.append("- **paths**:")
            for alias, targets in ts["paths"].items():
                targets_str = ", ".join(targets)
                lines.append(f"  - `{alias}` → `{targets_str}`")
        lines.append("")

    # ── f. Recently modified files (top 20) ──────────────────────────────────
    lines.append("## Recently Modified Files (Top 20)")
    print("[*] 최근 수정 파일 조회 중...", file=sys.stderr)
    recent_files = get_recently_modified_files(project_root, top_n=20)

    if recent_files:
        for i, item in enumerate(recent_files, 1):
            lines.append(f"{i}. `{item['file']}` ({item['date']})")
    else:
        lines.append("_(파일 없음)_")
    lines.append("")

    return "\n".join(lines)


# ──────────────────────────────────────────────────────────────────────────────
# IncrementalUpdater
# ──────────────────────────────────────────────────────────────────────────────

class IncrementalUpdater:
    """Incrementally updates the project map by processing only changed/deleted files."""

    # Cache schema version; bump when the JSON cache layout changes.
    CACHE_VERSION = 1
    # Maximum time to wait for the exclusive file lock.
    LOCK_TIMEOUT = 30  # seconds

    def __init__(
        self,
        project_root: Path,
        output_path: Path,
        depth: int,
        include_tests: bool,
        drive_log_path: str = None,
    ):
        """Store configuration and derive the cache/lock file locations.

        Args:
            project_root: Project root directory.
            output_path: Markdown output file path; the cache and lock
                files are created alongside it.
            depth: Directory-tree rendering depth.
            include_tests: Whether test files are included in extraction.
            drive_log_path: Optional path to a Drive change log (JSONL),
                or None when not used.
        """
        self.project_root = project_root
        self.output_path = output_path
        self.depth = depth
        self.include_tests = include_tests
        self.drive_log_path = drive_log_path  # Drive change-log file path (JSONL)
        self.cache_path = self._get_cache_path()
        self.lock_path = self._get_lock_path()
        self._lock_fd = None  # open lock file object while the flock is held

    # ── 경로 헬퍼 ─────────────────────────────────────────────────────────────

    def _get_cache_path(self) -> Path:
        """JSON 캐시 파일 경로 생성."""
        project_id = self.project_root.name
        cache_name = f".project-map-cache-{project_id}.json"
        return self.output_path.parent / cache_name

    def _get_lock_path(self) -> Path:
        """Lock 파일 경로 생성."""
        project_id = self.project_root.name
        lock_name = f".project-map-lock-{project_id}.lock"
        return self.output_path.parent / lock_name

    # ── 동시성 제어 ───────────────────────────────────────────────────────────

    def _acquire_lock(self):
        """fcntl LOCK_EX 획득 (timeout 30초)."""
        lock_file = str(self.lock_path)
        fd = open(lock_file, "w")
        deadline = time.time() + self.LOCK_TIMEOUT
        while True:
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                self._lock_fd = fd
                return fd
            except BlockingIOError:
                if time.time() >= deadline:
                    fd.close()
                    raise TimeoutError(
                        f"Lock 획득 실패: {self.LOCK_TIMEOUT}초 초과 ({lock_file})"
                    )
                time.sleep(0.2)

    def _release_lock(self, lock_fd) -> None:
        """Release and close the flock file object; None is a no-op.

        Unlock errors are swallowed, and the file object is closed even
        when unlocking fails (the finally block), so the fd never leaks.
        """
        if lock_fd is None:
            return
        try:
            fcntl.flock(lock_fd, fcntl.LOCK_UN)
        except OSError:
            pass
        finally:
            try:
                lock_fd.close()
            except OSError:
                pass
        self._lock_fd = None

    # ── 보안 ──────────────────────────────────────────────────────────────────

    def _validate_path(self, rel_path: str) -> Path:
        """Resolve *rel_path* against the project root with safety checks.

        Returns:
            The resolved absolute Path inside the project root.

        Raises:
            ValueError: For absolute inputs, paths that resolve outside the
                project root (path traversal), or sensitive files.
        """
        # Absolute inputs are rejected outright.
        if os.path.isabs(rel_path):
            raise ValueError(f"절대 경로는 허용되지 않습니다: {rel_path!r}")

        root = str(self.project_root.resolve())
        resolved = os.path.realpath(os.path.join(root, rel_path))

        inside = resolved == root or resolved.startswith(root + os.sep)
        if not inside:
            raise ValueError(
                f"경로 검증 실패 (Path Traversal 의심): {rel_path!r}"
            )

        if is_sensitive_file(rel_path):
            raise ValueError(f"민감 파일은 처리할 수 없습니다: {rel_path!r}")

        return Path(resolved)

    # ── 해시 ──────────────────────────────────────────────────────────────────

    def _compute_hash(self, file_path: Path) -> str:
        """SHA-256 해시 계산."""
        h = hashlib.sha256()
        try:
            with open(file_path, "rb") as f:
                for chunk in iter(lambda: f.read(65536), b""):
                    h.update(chunk)
        except (OSError, PermissionError) as e:
            raise OSError(f"해시 계산 실패: {file_path}: {e}") from e
        return h.hexdigest()

    # ── 파일 분류 ─────────────────────────────────────────────────────────────

    def _classify_file(self, rel_path: str) -> str:
        """Classify a project-relative file path into a map section.

        Returns:
            One of 'types' | 'routes' | 'components' | 'config' | 'other'.
        """
        normalized = rel_path.replace("\\", "/")
        fname = os.path.basename(normalized)

        # Configuration files.
        if fname in ("package.json", "tsconfig.json"):
            return "config"

        # API route handlers live under app/api/ or pages/api/.
        if fname in ("route.ts", "route.js"):
            under_app = "/app/api/" in normalized or normalized.startswith("app/api/")
            under_pages = "/pages/api/" in normalized or normalized.startswith("pages/api/")
            if under_app or under_pages:
                return "routes"

        # Components: .tsx files inside a components/ directory.
        if fname.endswith(".tsx") and "components" in normalized.split("/")[:-1]:
            return "components"

        # Types: plain .ts (declaration files excluded).
        if fname.endswith(".ts") and not fname.endswith(".d.ts"):
            return "types"

        return "other"

    # ── 섹션 데이터 추출 ──────────────────────────────────────────────────────

    def _extract_file_data(self, rel_path: str, section: str) -> dict:
        """
        파일에서 해당 섹션 데이터 추출.
        단일 파일 기준으로 데이터를 추출하여 dict 반환.
        """
        full_path = self.project_root / rel_path

        if section == "types":
            pattern = re.compile(
                r"^\s*export\s+(?:interface|type)\s+([A-Za-z_$][A-Za-z0-9_$]*)",
                re.MULTILINE,
            )
            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
                matches = pattern.findall(content)
                return {"types": matches}
            except (OSError, PermissionError):
                return {"types": []}

        elif section == "routes":
            normalized = rel_path.replace("\\", "/")
            is_app_router = (
                "/app/api/" in normalized or normalized.startswith("app/api/")
            )
            is_pages_router = (
                "/pages/api/" in normalized or normalized.startswith("pages/api/")
            )

            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
            except (OSError, PermissionError):
                return {"methods": [], "url": ""}

            methods = []
            for method in HTTP_METHODS:
                pat = re.compile(
                    rf"^\s*export\s+(?:async\s+)?(?:function|const)\s+{method}\b",
                    re.MULTILINE,
                )
                if pat.search(content):
                    methods.append(method)

            if is_app_router:
                idx = normalized.find("/app/api/")
                if idx == -1:
                    idx = normalized.find("app/api/")
                    url_path = (
                        "/" + normalized[idx:]
                        .replace("/route.ts", "")
                        .replace("/route.js", "")
                    )
                else:
                    url_path = (
                        normalized[idx + len("/app"):]
                        .replace("/route.ts", "")
                        .replace("/route.js", "")
                    )
            else:
                idx = normalized.find("/pages/api/")
                if idx == -1:
                    idx = normalized.find("pages/api/")
                    url_path = (
                        "/" + normalized[idx:]
                        .replace("/route.ts", "")
                        .replace("/route.js", "")
                    )
                else:
                    url_path = (
                        normalized[idx + len("/pages"):]
                        .replace("/route.ts", "")
                        .replace("/route.js", "")
                    )

            return {"methods": methods, "url": url_path}

        elif section == "components":
            pattern_func = re.compile(
                r"^\s*export\s+default\s+(?:function|class)\s+([A-Za-z_$][A-Za-z0-9_$]*)",
                re.MULTILINE,
            )
            pattern_var = re.compile(
                r"^\s*export\s+default\s+([A-Za-z_$][A-Za-z0-9_$]*)\s*[;,\n]",
                re.MULTILINE,
            )
            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
            except (OSError, PermissionError):
                return {"name": None}

            name = None
            m = pattern_func.search(content)
            if m:
                name = m.group(1)
            else:
                m = pattern_var.search(content)
                if m:
                    name = m.group(1)
            if not name:
                stem = full_path.stem
                if stem and stem[0].isupper():
                    name = stem
            return {"name": name}

        elif section == "config":
            fname = os.path.basename(rel_path)
            if fname == "package.json":
                try:
                    data = json.loads(full_path.read_text(encoding="utf-8"))
                    return {
                        "type": "package",
                        "name": data.get("name", "(unknown)"),
                        "version": data.get("version", "(unknown)"),
                        "dependencies": sorted(data.get("dependencies", {}).keys()),
                        "devDependencies": sorted(
                            data.get("devDependencies", {}).keys()
                        ),
                        "scripts": list(data.get("scripts", {}).keys()),
                    }
                except (OSError, json.JSONDecodeError, PermissionError):
                    return {"type": "package"}

            elif fname == "tsconfig.json":
                KEY_FIELDS = [
                    "target", "module", "moduleResolution", "lib", "strict",
                    "baseUrl", "outDir", "rootDir", "jsx", "esModuleInterop",
                    "allowSyntheticDefaultImports", "resolveJsonModule",
                    "incremental", "noEmit",
                ]
                try:
                    raw = full_path.read_text(encoding="utf-8")
                    raw = re.sub(r"//.*?$", "", raw, flags=re.MULTILINE)
                    raw = re.sub(r"/\*.*?\*/", "", raw, flags=re.DOTALL)
                    data = json.loads(raw)
                    compiler = data.get("compilerOptions", {})
                    co = {f: compiler[f] for f in KEY_FIELDS if f in compiler}
                    result = {"type": "tsconfig", "compilerOptions": co}
                    if "paths" in compiler:
                        result["paths"] = compiler["paths"]
                    return result
                except (OSError, json.JSONDecodeError, PermissionError):
                    return {"type": "tsconfig"}

        return {}

    # ── 캐시 I/O ──────────────────────────────────────────────────────────────

    def load_cache(self):
        """JSON 캐시 로드. 없거나 파싱 실패 시 None 반환."""
        if not self.cache_path.exists():
            return None
        try:
            raw = self.cache_path.read_text(encoding="utf-8")
            return json.loads(raw)
        except (OSError, json.JSONDecodeError, PermissionError):
            return None

    def save_cache(self, cache: dict) -> None:
        """JSON 캐시 저장 (atomic write: tempfile → os.rename)."""
        cache_dir = str(self.cache_path.parent)
        try:
            fd, tmp_path = tempfile.mkstemp(dir=cache_dir, suffix=".tmp")
            try:
                with os.fdopen(fd, "w", encoding="utf-8") as f:
                    json.dump(cache, f, ensure_ascii=False, indent=2)
                os.rename(tmp_path, str(self.cache_path))
            except Exception:
                # 임시 파일 정리
                try:
                    os.unlink(tmp_path)
                except OSError:
                    pass
                raise
        except (OSError, PermissionError) as e:
            raise OSError(f"캐시 저장 실패: {self.cache_path}: {e}") from e

    # ── 전체 스캔 → 캐시 생성 ─────────────────────────────────────────────────

    def full_scan_to_cache(self) -> dict:
        """Run a full project scan and build the JSON cache structure.

        Reuses the module-level extract_* helpers, hashes every recorded
        file, persists the cache via save_cache(), and returns it.
        """
        print("[*] Full scan → 캐시 생성 중...", file=sys.stderr)
        now = datetime.now().isoformat(timespec="seconds")
        project_name = self.project_root.name

        # Directory tree snapshot.
        tree_lines = [f"{project_name}/"] + build_tree(
            self.project_root, depth=self.depth, project_root=self.project_root
        )

        # File map: rel_path -> {hash, section, data, updated_at}.
        files = {}

        def add_entry(rel_path: str, section: str, data: dict) -> None:
            # Shared entry builder: skip sensitive files, record an empty
            # hash when the file cannot be read.
            if is_sensitive_file(rel_path):
                return
            try:
                file_hash = self._compute_hash(self.project_root / rel_path)
            except OSError:
                file_hash = ""
            files[rel_path] = {
                "hash": file_hash,
                "section": section,
                "data": data,
                "updated_at": now,
            }

        # types
        types_map = extract_types_interfaces(self.project_root, self.include_tests)
        for rel_path, type_names in types_map.items():
            add_entry(rel_path, "types", {"types": type_names})

        # routes
        for route in extract_api_routes(self.project_root, self.include_tests):
            add_entry(
                route["file"],
                "routes",
                {"methods": route["methods"], "url": route["url"]},
            )

        # components
        for comp in extract_components(self.project_root, self.include_tests):
            add_entry(comp["file"], "components", {"name": comp["name"]})

        # config + recent files
        config = {
            "packages": summarize_package_json(self.project_root),
            "tsconfig": summarize_tsconfig(self.project_root),
        }
        recent_files = get_recently_modified_files(self.project_root, top_n=20)

        cache = {
            "version": self.CACHE_VERSION,
            "generated_at": now,
            "project_name": project_name,
            "project_path": str(self.project_root),
            "depth": self.depth,
            "include_tests": self.include_tests,
            "files": files,
            "tree_lines": tree_lines,
            "config": config,
            "recent_files": recent_files,
        }

        self.save_cache(cache)
        print(f"[*] 캐시 저장 완료: {self.cache_path}", file=sys.stderr)
        return cache

    # ── Markdown 렌더링 ───────────────────────────────────────────────────────

    def render_markdown(self, cache: dict) -> str:
        """JSON 캐시 → Markdown 렌더링."""
        project_name = cache.get("project_name", self.project_root.name)
        generated_at = cache.get("generated_at", datetime.now().isoformat())
        project_path = cache.get("project_path", str(self.project_root))
        depth = cache.get("depth", self.depth)

        lines = []

        # 헤더
        lines.append(f"# Project Map: {project_name}")
        lines.append(f"> Generated: {generated_at}")
        lines.append(f"> Path: {project_path}")
        lines.append("")

        # ── a. 디렉토리 트리
        lines.append(f"## Directory Tree (depth: {depth})")
        lines.append("```")
        for tl in cache.get("tree_lines", []):
            lines.append(tl)
        lines.append("```")
        lines.append("")

        # ── b. Types & Interfaces
        lines.append("## Types & Interfaces")
        files = cache.get("files", {})
        types_items = sorted(
            [(rp, fd) for rp, fd in files.items() if fd.get("section") == "types"],
            key=lambda x: x[0],
        )
        if types_items:
            for rel_path, fd in types_items:
                type_names = fd.get("data", {}).get("types", [])
                if type_names:
                    names_str = ", ".join(f"`{n}`" for n in type_names)
                    lines.append(f"- `{rel_path}`: {names_str}")
        else:
            lines.append("_(타입/인터페이스 없음)_")
        lines.append("")

        # ── c. API Routes
        lines.append("## API Routes")
        route_items = sorted(
            [(rp, fd) for rp, fd in files.items() if fd.get("section") == "routes"],
            key=lambda x: x[1].get("data", {}).get("url", ""),
        )
        if route_items:
            for rel_path, fd in route_items:
                data = fd.get("data", {})
                methods_str = ", ".join(data.get("methods", []))
                url = data.get("url", "")
                if methods_str:
                    lines.append(f"- `{methods_str} {url}` → `{rel_path}`")
        else:
            lines.append("_(API 라우트 없음)_")
        lines.append("")

        # ── d. Components
        lines.append("## Components")
        comp_items = sorted(
            [
                (rp, fd)
                for rp, fd in files.items()
                if fd.get("section") == "components"
            ],
            key=lambda x: (x[1].get("data", {}).get("name") or "").lower(),
        )
        if comp_items:
            for rel_path, fd in comp_items:
                name = fd.get("data", {}).get("name")
                if name:
                    lines.append(f"- `{name}` → `{rel_path}`")
        else:
            lines.append("_(컴포넌트 없음)_")
        lines.append("")

        # ── e. Configuration Summary
        lines.append("## Configuration Summary")
        config = cache.get("config", {})

        for pkg in config.get("packages", []):
            lines.append(f"### {pkg.get('file', '')}")
            lines.append(f"- **Name**: {pkg.get('name', '(unknown)')}")
            lines.append(f"- **Version**: {pkg.get('version', '(unknown)')}")
            scripts = pkg.get("scripts", [])
            if scripts:
                lines.append(f"- **Scripts**: {', '.join(scripts)}")
            deps = pkg.get("dependencies", [])
            if deps:
                lines.append(f"- **Dependencies**: {', '.join(deps)}")
            else:
                lines.append("- **Dependencies**: (없음)")
            dev_deps = pkg.get("devDependencies", [])
            if dev_deps:
                lines.append(f"- **DevDependencies**: {', '.join(dev_deps)}")
            else:
                lines.append("- **DevDependencies**: (없음)")
            lines.append("")

        for ts in config.get("tsconfig", []):
            lines.append(f"### {ts.get('file', '')}")
            for key, val in ts.get("compilerOptions", {}).items():
                if isinstance(val, list):
                    lines.append(f"- **{key}**: {', '.join(str(v) for v in val)}")
                else:
                    lines.append(f"- **{key}**: {val}")
            if "paths" in ts:
                lines.append("- **paths**:")
                for alias, targets in ts["paths"].items():
                    targets_str = ", ".join(targets)
                    lines.append(f"  - `{alias}` → `{targets_str}`")
            lines.append("")

        # ── f. Recently Modified Files
        lines.append("## Recently Modified Files (Top 20)")
        recent_files = cache.get("recent_files", [])
        if recent_files:
            for i, item in enumerate(recent_files, 1):
                lines.append(f"{i}. `{item['file']}` ({item['date']})")
        else:
            lines.append("_(파일 없음)_")
        lines.append("")

        # ── g. Drive Changes (Recent)
        drive_changes = cache.get("drive_changes", [])
        if drive_changes:
            lines.append("## Drive Changes (Recent)")
            for change in drive_changes:
                ts = change.get("timestamp", "")
                date_part = ts[:10] if ts else "(unknown)"
                path = change.get("path", "")
                action = change.get("action", "upload")
                lines.append(f"- {date_part}: {path} ({action})")
            lines.append("")

        return "\n".join(lines)

    # ── Markdown 저장 (atomic + .bak) ─────────────────────────────────────────

    def _write_markdown_atomic(self, content: str) -> None:
        """Markdown 파일을 atomic write로 저장. 기존 파일은 .bak으로 백업."""
        out_str = str(self.output_path)
        bak_str = out_str + ".bak"
        out_dir = str(self.output_path.parent)

        # 기존 파일 .bak 복사
        if self.output_path.exists():
            try:
                import shutil
                shutil.copy2(out_str, bak_str)
            except (OSError, PermissionError):
                pass

        # atomic write
        fd, tmp_path = tempfile.mkstemp(dir=out_dir, suffix=".tmp")
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as f:
                f.write(content)
            os.rename(tmp_path, out_str)
        except Exception:
            try:
                os.unlink(tmp_path)
            except OSError:
                pass
            raise

    # ── Drive 변경 로그 처리 ──────────────────────────────────────────────────

    def _process_drive_log(self, cache: dict) -> None:
        """
        Drive 변경 로그 파일(JSONL)에서 미처리 변경사항을 읽어 캐시에 반영하고,
        drive-change-log.py mark-processed를 호출하여 처리 완료 마킹.
        """
        import json as _json

        drive_log_path = Path(self.drive_log_path)
        if not drive_log_path.exists():
            print(
                f"[경고] Drive 로그 파일 없음, 건너뜀: {drive_log_path}",
                file=sys.stderr,
            )
            return

        # 미처리 항목 읽기
        unprocessed = []
        try:
            with open(str(drive_log_path), "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        entry = _json.loads(line)
                        if not entry.get("processed", False):
                            unprocessed.append(entry)
                    except _json.JSONDecodeError:
                        continue
        except OSError as e:
            print(f"[경고] Drive 로그 읽기 실패: {e}", file=sys.stderr)
            return

        if not unprocessed:
            print("[*] Drive 로그: 미처리 변경 없음", file=sys.stderr)
            return

        print(
            f"[*] Drive 로그: {len(unprocessed)}건 미처리 변경 발견",
            file=sys.stderr,
        )

        # 캐시의 drive_changes 섹션에 추가 (최근 50건 유지)
        existing_changes = cache.get("drive_changes", [])
        existing_changes.extend(unprocessed)
        # 타임스탬프 기준 정렬 (최신순), 최대 50건 보존
        existing_changes.sort(key=lambda x: x.get("timestamp", ""), reverse=True)
        cache["drive_changes"] = existing_changes[:50]

        # 처리 완료 마킹 (drive-change-log.py 호출)
        ids_to_mark = ",".join(e["id"] for e in unprocessed if e.get("id"))
        if ids_to_mark:
            drive_log_script = Path(__file__).parent / "drive-change-log.py"
            if drive_log_script.exists():
                try:
                    result = subprocess.run(
                        [
                            sys.executable,
                            str(drive_log_script),
                            "--log-dir",
                            str(drive_log_path.parent),
                            "mark-processed",
                            "--project",
                            unprocessed[0].get("project", "unknown"),
                            "--ids",
                            ids_to_mark,
                        ],
                        capture_output=True,
                        text=True,
                        timeout=15,
                    )
                    if result.returncode == 0:
                        print(
                            f"[*] Drive 로그 mark-processed 완료 ({len(unprocessed)}건)",
                            file=sys.stderr,
                        )
                    else:
                        print(
                            f"[경고] Drive 로그 mark-processed 실패: {result.stderr.strip()}",
                            file=sys.stderr,
                        )
                except (OSError, subprocess.TimeoutExpired) as e:
                    print(f"[경고] drive-change-log.py 호출 실패: {e}", file=sys.stderr)
            else:
                print(
                    f"[경고] drive-change-log.py 스크립트 없음: {drive_log_script}",
                    file=sys.stderr,
                )

    # ── 메인 업데이트 ─────────────────────────────────────────────────────────

    def update(self, changed_files: list, deleted_files: list) -> None:
        """
        Main incremental-update entry point, driven by changed/deleted paths.

        Steps:
        1. Acquire the file lock
        2. Load the cache (fall back to a full scan when missing)
        3. Process changed_files
        4. Process deleted_files
        5. Rebuild the tree / recent_files
        6. Bump the cache version and save
        7. Render Markdown and write it atomically
        8. Release the lock
        """
        lock_fd = None
        try:
            lock_fd = self._acquire_lock()

            # Load the cache; when absent, do a full scan to build it.
            cache = self.load_cache()
            if cache is None:
                print("[*] 캐시 없음 → full scan 수행", file=sys.stderr)
                cache = self.full_scan_to_cache()

            now = datetime.now().isoformat(timespec="seconds")
            files = cache.setdefault("files", {})

            # ── Process changed_files
            for rel_path in changed_files:
                rel_path = rel_path.strip()
                if not rel_path:
                    continue

                # Path validation — presumably rejects paths escaping the
                # project root (helper not shown here); verify in _validate_path.
                try:
                    abs_path = self._validate_path(rel_path)
                except ValueError as e:
                    print(f"[경고] 경로 검증 실패, 건너뜀: {e}", file=sys.stderr)
                    continue

                # Skip files that no longer exist on disk.
                if not abs_path.exists():
                    print(
                        f"[경고] 파일 없음, 건너뜀: {rel_path}",
                        file=sys.stderr,
                    )
                    continue

                # Test-file filter (honors the include_tests flag).
                if not self.include_tests and is_test_path(rel_path):
                    continue

                # Default excluded-file filter (e.g. .env, .DS_Store patterns).
                fname = os.path.basename(rel_path)
                if is_excluded_file(fname):
                    continue

                # Compute the SHA-256 hash of the file contents.
                try:
                    new_hash = self._compute_hash(abs_path)
                except OSError as e:
                    print(f"[경고] 해시 계산 실패, 건너뜀: {e}", file=sys.stderr)
                    continue

                # Same hash as the cached entry → nothing changed, skip.
                existing = files.get(rel_path, {})
                if existing.get("hash") == new_hash:
                    print(
                        f"[*] 변경 없음 (hash 동일), 건너뜀: {rel_path}",
                        file=sys.stderr,
                    )
                    continue

                # Classify the file into its cache section.
                section = self._classify_file(rel_path)

                # Extract section-specific data for the cache entry.
                data = self._extract_file_data(rel_path, section)

                # Update the cache node for this file.
                files[rel_path] = {
                    "hash": new_hash,
                    "section": section,
                    "data": data,
                    "updated_at": now,
                }
                print(f"[*] 업데이트: {rel_path} (section={section})", file=sys.stderr)

            # ── Process deleted_files
            for rel_path in deleted_files:
                rel_path = rel_path.strip()
                if not rel_path:
                    continue
                if rel_path in files:
                    del files[rel_path]
                    print(f"[*] 삭제: {rel_path}", file=sys.stderr)

            # ── Rebuild config when package.json / tsconfig.json changed
            changed_set = set(f.strip() for f in changed_files)
            deleted_set = set(f.strip() for f in deleted_files)
            config_changed = any(
                os.path.basename(p) in ("package.json", "tsconfig.json")
                for p in changed_set | deleted_set
            )
            if config_changed:
                cache["config"] = {
                    "packages": summarize_package_json(self.project_root),
                    "tsconfig": summarize_tsconfig(self.project_root),
                }
                print("[*] config 재생성 완료", file=sys.stderr)

            # ── Rebuild the directory tree
            project_name = cache.get("project_name", self.project_root.name)
            cache["tree_lines"] = [f"{project_name}/"] + build_tree(
                self.project_root, depth=self.depth, project_root=self.project_root
            )

            # ── Refresh recent_files
            cache["recent_files"] = get_recently_modified_files(
                self.project_root, top_n=20
            )

            # ── Fold in the Drive change log, when configured
            if self.drive_log_path:
                self._process_drive_log(cache)

            # ── Bump the cache version
            cache["version"] = cache.get("version", self.CACHE_VERSION) + 1
            cache["generated_at"] = now

            # ── Save the cache (atomic)
            self.save_cache(cache)

            # ── Render Markdown + atomic write
            md_content = self.render_markdown(cache)
            self._write_markdown_atomic(md_content)
            print(f"[완료] Markdown 업데이트: {self.output_path}", file=sys.stderr)

        finally:
            self._release_lock(lock_fd)


# ──────────────────────────────────────────────────────────────────────────────
# main
# ──────────────────────────────────────────────────────────────────────────────

def main():
    """CLI entry point: dispatch to rollback, incremental, or full-scan mode."""
    args = parse_args()

    root = Path(args.project_path).resolve()
    out_file = Path(args.output).resolve()

    # Validate the project path before doing anything else.
    if not root.exists():
        print(f"[오류] 프로젝트 경로가 존재하지 않습니다: {root}", file=sys.stderr)
        sys.exit(1)
    if not root.is_dir():
        print(f"[오류] 프로젝트 경로가 디렉토리가 아닙니다: {root}", file=sys.stderr)
        sys.exit(1)

    # Make sure the output directory exists.
    out_file.parent.mkdir(parents=True, exist_ok=True)

    # ── Rollback mode: restore the previous output from its .bak copy.
    if args.rollback:
        backup = Path(str(out_file) + ".bak")
        if not backup.exists():
            print(f"[오류] 백업 파일이 없습니다: {backup}", file=sys.stderr)
            sys.exit(1)
        try:
            import shutil
            shutil.copy2(str(backup), str(out_file))
            print(f"[완료] 롤백 완료: {backup} → {out_file}", file=sys.stderr)
        except (OSError, PermissionError) as e:
            print(f"[오류] 롤백 실패: {e}", file=sys.stderr)
            sys.exit(1)
        return

    # ── Incremental mode: update only the given changed/deleted files.
    if args.incremental:
        def split_csv(value):
            # Comma-separated CLI argument → list of non-blank entries.
            return [p for p in value.split(",") if p.strip()] if value else []

        changed = split_csv(args.changed_files)
        deleted = split_csv(args.deleted_files)

        print(f"[*] 프로젝트: {root}", file=sys.stderr)
        print(f"[*] 출력: {out_file}", file=sys.stderr)
        print(
            f"[*] incremental 모드: changed={len(changed)}, deleted={len(deleted)}",
            file=sys.stderr,
        )

        drive_log = getattr(args, "drive_log", None)
        if drive_log:
            print(f"[*] Drive 로그: {drive_log}", file=sys.stderr)

        updater = IncrementalUpdater(
            project_root=root,
            output_path=out_file,
            depth=args.depth,
            include_tests=args.include_tests,
            drive_log_path=drive_log,
        )
        try:
            updater.update(changed, deleted)
        except TimeoutError as e:
            print(f"[오류] {e}", file=sys.stderr)
            sys.exit(1)
        except (OSError, PermissionError) as e:
            print(f"[오류] {e}", file=sys.stderr)
            sys.exit(1)
        return

    # ── Full mode (legacy behavior): regenerate the whole document.
    print(f"[*] 프로젝트: {root}", file=sys.stderr)
    print(f"[*] 출력: {out_file}", file=sys.stderr)
    print(f"[*] depth: {args.depth}, include-tests: {args.include_tests}", file=sys.stderr)
    print("[*] Markdown 생성 시작...", file=sys.stderr)

    content = generate_markdown(
        project_root=root,
        output_path=out_file,
        depth=args.depth,
        include_tests=args.include_tests,
    )

    try:
        out_file.write_text(content, encoding="utf-8")
        print(f"[완료] 파일 생성: {out_file}", file=sys.stderr)
    except (OSError, PermissionError) as e:
        print(f"[오류] 파일 쓰기 실패: {e}", file=sys.stderr)
        sys.exit(1)

    # Also build the JSON cache so later incremental runs start warm.
    try:
        IncrementalUpdater(
            project_root=root,
            output_path=out_file,
            depth=args.depth,
            include_tests=args.include_tests,
        ).full_scan_to_cache()
    except (OSError, PermissionError) as e:
        print(f"[경고] 캐시 생성 실패 (무시): {e}", file=sys.stderr)


if __name__ == "__main__":
    # Script entry point; diagnostics go to stderr, results to --output.
    main()
