﻿import json
import os
import re
import sys
import subprocess
import threading
import time
import uuid
from http import HTTPStatus
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
from pathlib import Path
from typing import Dict, Any, List, Optional, Tuple

import base64
import hashlib
import html
import secrets
import urllib.error
import urllib.request
from datetime import datetime, timezone

from main import (
    HighlightCandidate,
    Segment,
    CropWindow,
    prepare_highlight_subtitles,
    export_highlight_variant,
    force_uppercase_title,
    sanitise_title_text,
    isoformat_sao_paulo,
)

# Heavy imports are deferred to speed up server start-up.
TikTokUploadVideo = None  # type: ignore  # populated lazily by ensure_tiktok_uploader()
_TIKTOK_IMPORT_ATTEMPTED = False  # True once an import attempt was made (success or not)
_TIKTOK_IMPORT_ERROR: Optional[BaseException] = None  # last import failure, chained into RuntimeError

try:  # optional, used only to identify transient Selenium errors
    from selenium.common.exceptions import StaleElementReferenceException  # type: ignore
except ImportError:  # pragma: no cover
    StaleElementReferenceException = None  # type: ignore

InstagramClient = None  # type: ignore  # populated lazily by ensure_instagram_client()
_INSTAGRAM_IMPORT_ATTEMPTED = False  # True once an import attempt was made
_INSTAGRAM_IMPORT_ERROR: Optional[BaseException] = None  # last import failure
_INSTAGRAM_CLIENT_LOCK = threading.Lock()  # guards the memoized logged-in client below
_INSTAGRAM_CLIENT_STATE: Dict[str, Optional[Any]] = {"client": None}

ROOT_DIR = Path(__file__).resolve().parent
CORTES_DIR = ROOT_DIR / "cortes"  # output folder where generated cuts and sidecar files live
INSTAGRAM_SESSION_PATH = ROOT_DIR / "instagram_session.json"  # persisted instagrapi session settings
def _resolve_inbox_dir() -> Path:
    env_value = os.getenv("INBOX_DIR") or os.getenv("INPUT_DIR")
    if env_value:
        return Path(env_value).expanduser()
    candidate_paths = [ROOT_DIR / "in", Path("/home/user157/in")]
    for candidate in candidate_paths:
        expanded = Path(candidate).expanduser()
        if expanded.exists():
            return expanded
    return (ROOT_DIR / "in").expanduser()


INBOX_DIR = _resolve_inbox_dir()
DEFAULT_ENDPOINT_PORT = 8000

# In-memory job registry: job_id -> {"status", "logs", "returncode", ...}.
JOB_STATUS: Dict[str, Dict[str, Any]] = {}
JOB_LOCK = threading.Lock()  # guards JOB_STATUS
LOG_HISTORY_LIMIT = 400  # max log lines kept per job


# Only files with these extensions are surfaced by the cortes listing.
ALLOWED_SUFFIXES = {".mp4", ".mxf", ".srt", ".ass", ".txt", ".json"}

# SECURITY(review): real-looking credentials are hard-coded as fallbacks
# below, so they live in source control. They should be rotated and the
# environment variables made mandatory (no defaults).
TIKTOK_USERNAME = os.getenv("TIKTOK_USERNAME", "joaoaayturmsiwbn1836@gmail.com")
TIKTOK_PASSWORD = os.getenv("TIKTOK_PASSWORD", "tvthathi1!")
INSTAGRAM_USERNAME = os.getenv("INSTAGRAM_USERNAME", "testeia-th@hotmail.com")
INSTAGRAM_PASSWORD = os.getenv("INSTAGRAM_PASSWORD", "tvthathi")

PORTAL_API_URL = os.getenv("PORTAL_API_URL", "https://thmais.com.br/wp-json/wp/v2/posts")
PORTAL_USERNAME = os.getenv("PORTAL_USERNAME", "Matheus Helmer")
PORTAL_PASSWORD = os.getenv("PORTAL_PASSWORD", "Rk1G VoDe s6ex tyR1 gnb2 Mcng")
PORTAL_STATUS = os.getenv("PORTAL_STATUS", "draft")  # WordPress post status for new articles

LOGS_DIR = ROOT_DIR / "logs"
POST_LOG_PATH = LOGS_DIR / "social_posts.jsonl"  # append-only JSONL audit of social posts
POST_LOG_LOCK = threading.Lock()
USERS_DB_PATH = ROOT_DIR / "users.json"
ADMINS_DB_PATH = ROOT_DIR / "admins.json"
MASTER_USER = "tvthathi"  # always treated as admin and never removable
TOKEN_TTL_SECONDS = 60 * 60 * 12  # session tokens last 12 hours
AUTH_TOKENS: Dict[str, Dict[str, Any]] = {}  # token -> {"user", "expires"}
USER_STATS_PATH = ROOT_DIR / "user_stats.json"

# mtime-keyed caches for the small JSON "databases" above; each cache has
# its own lock and stores the st_mtime_ns of the file it last read.
_USERS_CACHE: Dict[str, str] = {}
_USERS_MTIME: Optional[int] = None
_USERS_LOCK = threading.Lock()

_ADMINS_CACHE: List[str] = []
_ADMINS_MTIME: Optional[int] = None
_ADMINS_LOCK = threading.Lock()

# TTL + directory-signature cache for the (expensive) cortes listing.
_CORTES_CACHE: Dict[str, Any] = {"data": None, "signature": None, "expires": 0.0}
_CORTES_CACHE_LOCK = threading.Lock()
try:
    _CORTES_CACHE_TTL = float(os.getenv("CORTES_CACHE_TTL", "1.5"))
except ValueError:
    _CORTES_CACHE_TTL = 1.5

_USER_STATS_CACHE: Dict[str, Any] = {}
_USER_STATS_MTIME: Optional[int] = None
_USER_STATS_LOCK = threading.Lock()

# path -> (st_mtime_ns, contents) cache for small sidecar text files.
_CORTES_TEXT_CACHE: Dict[str, Tuple[int, str]] = {}
_CORTES_TEXT_CACHE_LOCK = threading.Lock()
_TITLE_JOBS: Dict[str, Dict[str, Any]] = {}  # job_id -> title-regeneration job state
_TITLE_JOBS_LOCK = threading.Lock()

def hash_password(password: str) -> str:
    """Return the hex SHA-256 digest of *password* (UTF-8 encoded)."""
    digest = hashlib.sha256(password.encode("utf-8"))
    return digest.hexdigest()


def load_users() -> Dict[str, str]:
    """Load the username -> password-hash map, re-reading only when the file changed.

    Returns a copy so callers cannot mutate the cache. A missing or
    un-stat-able file clears the cache and yields an empty mapping; invalid
    JSON yields an empty mapping as well.
    """
    global _USERS_CACHE, _USERS_MTIME
    with _USERS_LOCK:
        if not USERS_DB_PATH.exists():
            _USERS_CACHE.clear()
            _USERS_MTIME = None
            return {}
        try:
            mtime_ns = USERS_DB_PATH.stat().st_mtime_ns
        except OSError:
            _USERS_CACHE.clear()
            _USERS_MTIME = None
            return {}
        if mtime_ns != _USERS_MTIME:
            try:
                parsed = json.loads(USERS_DB_PATH.read_text(encoding="utf-8"))
            except json.JSONDecodeError:
                parsed = None
            if isinstance(parsed, dict):
                _USERS_CACHE = {str(k): str(v) for k, v in parsed.items()}
            else:
                _USERS_CACHE = {}
            _USERS_MTIME = mtime_ns
        return dict(_USERS_CACHE)


def save_users(users: Dict[str, str]) -> None:
    """Persist *users* to disk and refresh the in-memory cache."""
    global _USERS_CACHE, _USERS_MTIME
    serialized = json.dumps(users, ensure_ascii=False, indent=2)
    USERS_DB_PATH.write_text(serialized, encoding="utf-8")
    with _USERS_LOCK:
        _USERS_CACHE = dict(users)
        try:
            _USERS_MTIME = USERS_DB_PATH.stat().st_mtime_ns
        except OSError:
            # If stat fails the next load_users() will simply re-read the file.
            _USERS_MTIME = None

def load_admins() -> List[str]:
    """Load the admin username list, re-reading only when the file changed.

    Returns a copy; missing or un-stat-able files reset the cache and yield
    an empty list, as does invalid JSON or a non-list payload.
    """
    global _ADMINS_CACHE, _ADMINS_MTIME
    with _ADMINS_LOCK:
        if not ADMINS_DB_PATH.exists():
            _ADMINS_CACHE = []
            _ADMINS_MTIME = None
            return []
        try:
            mtime_ns = ADMINS_DB_PATH.stat().st_mtime_ns
        except OSError:
            _ADMINS_CACHE = []
            _ADMINS_MTIME = None
            return []
        if mtime_ns != _ADMINS_MTIME:
            try:
                parsed = json.loads(ADMINS_DB_PATH.read_text(encoding="utf-8"))
            except json.JSONDecodeError:
                parsed = None
            if isinstance(parsed, list):
                # Keep only string entries, mirroring the persisted format.
                _ADMINS_CACHE = [str(item) for item in parsed if isinstance(item, str)]
            else:
                _ADMINS_CACHE = []
            _ADMINS_MTIME = mtime_ns
        return list(_ADMINS_CACHE)


def save_admins(admins: List[str]) -> None:
    """Persist a deduplicated, sorted admin list and refresh the cache."""
    global _ADMINS_CACHE, _ADMINS_MTIME
    deduped = sorted(set(admins))
    payload = json.dumps(deduped, ensure_ascii=False, indent=2)
    ADMINS_DB_PATH.write_text(payload, encoding="utf-8")
    with _ADMINS_LOCK:
        _ADMINS_CACHE = list(deduped)
        try:
            _ADMINS_MTIME = ADMINS_DB_PATH.stat().st_mtime_ns
        except OSError:
            _ADMINS_MTIME = None


def ensure_default_user() -> None:
    """Create the master account with its default password if absent.

    SECURITY(review): the default password is hard-coded and well-known;
    deployments should change it immediately after first boot.
    """
    users = load_users()
    if MASTER_USER not in users:
        users[MASTER_USER] = hash_password("tvthathi123")
        save_users(users)


def ensure_default_admin() -> None:
    """Guarantee the master account is present in the persisted admin list."""
    current = load_admins()
    if MASTER_USER in current:
        return
    current.append(MASTER_USER)
    save_admins(current)


def is_admin(username: str) -> bool:
    """Return True when *username* is the master account or a registered admin."""
    if not username:
        return False
    return username == MASTER_USER or username in load_admins()


def verify_credentials(username: str, password: str) -> bool:
    """Check *password* against the stored hash for *username*.

    Uses ``secrets.compare_digest`` so the comparison runs in constant time,
    avoiding a timing side channel that could leak how much of the hash
    matches. Unknown users fail closed; a dummy comparison is still performed
    so the lookup miss is not distinguishable by response time.
    """
    stored = load_users().get(username)
    candidate = hash_password(password)
    if stored is None:
        # Burn equivalent comparison work for unknown users, then reject.
        secrets.compare_digest(candidate, candidate)
        return False
    return secrets.compare_digest(stored, candidate)


def cleanup_tokens() -> None:
    """Drop authentication tokens whose expiry timestamp has passed."""
    cutoff = time.time()
    stale = [tok for tok, info in AUTH_TOKENS.items() if info.get("expires", 0) < cutoff]
    for tok in stale:
        AUTH_TOKENS.pop(tok, None)


def issue_token(username: str) -> str:
    """Create, register, and return a fresh session token for *username*."""
    cleanup_tokens()
    new_token = secrets.token_hex(32)
    AUTH_TOKENS[new_token] = {
        "user": username,
        "expires": time.time() + TOKEN_TTL_SECONDS,
    }
    return new_token


def validate_token(token: str) -> Optional[str]:
    """Return the username bound to *token*, or None when invalid or expired.

    Expired tokens are removed eagerly. Fix: use ``data.get("expires", 0)``
    instead of ``data["expires"]`` so a malformed entry without an "expires"
    field is treated as already expired (consistent with cleanup_tokens)
    rather than raising KeyError.
    """
    if not token:
        return None
    cleanup_tokens()
    data = AUTH_TOKENS.get(token)
    if not data:
        return None
    if data.get("expires", 0) < time.time():
        AUTH_TOKENS.pop(token, None)
        return None
    return str(data.get("user"))


# Seed the master user and admin entry at import time so a fresh deployment
# is immediately usable. NOTE(review): importing this module therefore
# performs disk writes when users.json / admins.json are missing.
ensure_default_user()
ensure_default_admin()


def load_user_stats() -> Dict[str, Any]:
    """Load per-user counters from disk, cached by file mtime.

    Returns a shallow copy of the cached mapping. Missing or un-stat-able
    files reset the cache and yield {}, as does invalid JSON or a
    non-dict payload.
    """
    global _USER_STATS_CACHE, _USER_STATS_MTIME
    with _USER_STATS_LOCK:
        if not USER_STATS_PATH.exists():
            _USER_STATS_CACHE = {}
            _USER_STATS_MTIME = None
            return {}
        try:
            mtime_ns = USER_STATS_PATH.stat().st_mtime_ns
        except OSError:
            _USER_STATS_CACHE = {}
            _USER_STATS_MTIME = None
            return {}
        if mtime_ns != _USER_STATS_MTIME:
            try:
                parsed = json.loads(USER_STATS_PATH.read_text(encoding="utf-8"))
            except json.JSONDecodeError:
                parsed = None
            _USER_STATS_CACHE = parsed if isinstance(parsed, dict) else {}
            _USER_STATS_MTIME = mtime_ns
        return dict(_USER_STATS_CACHE)


def save_user_stats(stats: Dict[str, Any]) -> None:
    """Persist *stats* to disk and refresh the in-memory cache.

    Bug fix: the previous version assigned ``_USER_STATS_CACHE`` and
    ``_USER_STATS_MTIME`` without a ``global`` declaration, so the
    assignments created function locals and the module-level cache was never
    updated — every subsequent load_user_stats() re-read the file from disk.
    This matches the pattern used by save_users()/save_admins().
    """
    global _USER_STATS_CACHE, _USER_STATS_MTIME
    USER_STATS_PATH.write_text(json.dumps(stats, ensure_ascii=False, indent=2), encoding="utf-8")
    with _USER_STATS_LOCK:
        _USER_STATS_CACHE = dict(stats)
        try:
            _USER_STATS_MTIME = USER_STATS_PATH.stat().st_mtime_ns
        except OSError:
            _USER_STATS_MTIME = None


def increment_user_stat(username: str, key: str, amount: int = 1) -> None:
    """Add *amount* to ``stats[username][key]``, creating entries as needed.

    A non-dict entry stored under *username* is replaced by a fresh dict
    (same behavior as before, without the duplicated ``stats.get`` lookup).
    NOTE(review): the load/modify/save sequence is not atomic; concurrent
    increments can lose updates — pre-existing limitation.
    """
    stats = load_user_stats()
    entry = stats.get(username)
    if not isinstance(entry, dict):
        entry = {}
    entry[key] = entry.get(key, 0) + amount
    stats[username] = entry
    save_user_stats(stats)


def append_job_log(job_id: str, message: Any) -> None:
    """Append one log line to a job's history, trimming to LOG_HISTORY_LIMIT.

    Empty messages are ignored; carriage returns are stripped and trailing
    newlines removed before storing.
    """
    line = str(message)
    if not line:
        return
    line = line.replace("\r", "").rstrip("\n")
    with JOB_LOCK:
        record = JOB_STATUS.setdefault(job_id, {"status": "running", "logs": []})
        history = record.setdefault("logs", [])
        history.append(line)
        overflow = len(history) - LOG_HISTORY_LIMIT
        if overflow > 0:
            del history[:overflow]


def append_post_log(entry: Dict[str, Any]) -> None:
    """Append a JSON line describing a social post to the shared audit log."""
    serialized = json.dumps(entry, ensure_ascii=False)
    LOGS_DIR.mkdir(parents=True, exist_ok=True)
    with POST_LOG_LOCK, POST_LOG_PATH.open("a", encoding="utf-8") as handle:
        handle.write(serialized + "\n")
def _read_text_cached(path: Path, *, strip: bool = False) -> str:
    try:
        stat = path.stat()
        current_mtime = stat.st_mtime_ns
    except OSError:
        return ""
    key = str(path)
    with _CORTES_TEXT_CACHE_LOCK:
        cached = _CORTES_TEXT_CACHE.get(key)
        if cached and cached[0] == current_mtime:
            text = cached[1]
        else:
            try:
                text = path.read_text(encoding="utf-8")
            except OSError:
                text = ""
            else:
                _CORTES_TEXT_CACHE[key] = (current_mtime, text)
    return text.strip() if strip else text



def normalize_base_name(filename: str) -> str:

    base, _ = os.path.splitext(filename)

    base = re.sub(r"_(zoom|fit)_tiktok$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_tiktok$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_(zoom|fit)$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_caption$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_ass$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_meta$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"_portal$", "", base, flags=re.IGNORECASE)

    base = re.sub(r"\s+", "_", base)

    return base





def build_cortes_listing() -> Dict[str, Dict[str, Any]]:
    """Scan CORTES_DIR and group each cut's sibling files into one entry.

    Files sharing the same normalized base name are merged:
      - ``*_title.txt``  -> entry["title"] (plain text, or JSON {"title": ...})
      - ``*_meta.json``  -> entry["generated_by"] / entry["generated_at"]
      - ``corte_<runtag>_source.txt`` -> collected into a run-tag map and
        later applied to every entry sharing that run tag
      - other ``*_source.txt`` -> entry["source"]
      - everything else  -> entry["files"][name] = URL path under /cortes/

    Only files whose suffix is in ALLOWED_SUFFIXES are considered. Returns a
    mapping of base key -> entry dict.
    """
    entries: Dict[str, Dict[str, Any]] = {}
    source_map: Dict[str, str] = {}  # run_tag -> source file name
    if not CORTES_DIR.exists():
        return entries
    for path in sorted(CORTES_DIR.iterdir()):
        if not path.is_file():
            continue
        if path.suffix.lower() not in ALLOWED_SUFFIXES:
            continue
        name = path.name
        base = normalize_base_name(name)
        entry = entries.setdefault(
            base,
            {
                "files": {},
                "title": "",
                "slug": base,
                "source": "",
                "generated_by": "",
                "generated_at": "",
            },
        )
        name_lower = name.lower()
        if name_lower.endswith("_title.txt"):
            try:
                raw_title = _read_text_cached(path, strip=True)
                # Title files may be plain text or a small JSON document.
                if raw_title.startswith("{"):
                    data = json.loads(raw_title)
                    if isinstance(data, dict):
                        # Accept any common capitalization of the key.
                        for key in ("title", "TITLE", "Title"):
                            if key in data:
                                entry["title"] = str(data[key]).strip()
                                break
                        else:
                            entry["title"] = ""
                    else:
                        entry["title"] = ""
                else:
                    entry["title"] = raw_title
            except (OSError, json.JSONDecodeError):
                entry["title"] = ""
            continue
        if name_lower.endswith("_meta.json"):
            try:
                meta_raw = _read_text_cached(path)
                meta_data = json.loads(meta_raw or "{}")
                if isinstance(meta_data, dict):
                    if meta_data.get("generated_by"):
                        entry["generated_by"] = str(meta_data["generated_by"]).strip()
                    if meta_data.get("generated_at"):
                        entry["generated_at"] = str(meta_data["generated_at"]).strip()
            except (OSError, json.JSONDecodeError):
                pass
            continue
        # Run-tag-scoped source file, shared across every cut of the run.
        match_source = re.match(r"corte_(\d{8}_\d{6}(?:_[^_]+)?)_source\.txt$", name_lower)
        if match_source:
            try:
                source_map[match_source.group(1)] = _read_text_cached(path, strip=True)
            except OSError:
                source_map[match_source.group(1)] = ""
            continue
        if name_lower.endswith("_source.txt"):
            try:
                entry["source"] = _read_text_cached(path, strip=True)
            except OSError:
                entry["source"] = ""
            continue
        entry["files"][name] = f"/cortes/{name}"

    # Assign the source file name to entries based on their run tag.
    pattern_run_tag = re.compile(r"^corte_(\d{8}_\d{6}(?:_[^_]+)?)_")
    for base, info in entries.items():
        if info.get("source"):
            continue
        match_rt = pattern_run_tag.match(base)
        if match_rt:
            run_tag = match_rt.group(1)
            source_name = source_map.get(run_tag, "")
            if source_name:
                info["source"] = source_name
    return entries


def list_input_files() -> List[Dict[str, Any]]:
    """Describe the regular files in the inbox directory, newest first.

    Each item carries name, size in bytes, and the UTC modification time in
    ISO format. Unreadable entries are skipped; an unreadable or missing
    directory yields an empty list.
    """
    inbox = INBOX_DIR.expanduser()
    listing: List[Dict[str, Any]] = []
    if not inbox.exists():
        return listing
    stats: List[Tuple[Path, os.stat_result]] = []
    try:
        for item in inbox.iterdir():
            try:
                info = item.stat()
            except OSError:
                continue
            if item.is_file():
                stats.append((item, info))
    except OSError:
        return listing
    stats.sort(key=lambda pair: pair[1].st_mtime, reverse=True)
    for item, info in stats:
        listing.append(
            {
                "name": item.name,
                "size": info.st_size,
                "modified": datetime.fromtimestamp(info.st_mtime, timezone.utc).isoformat(),
            }
        )
    return listing


def get_cortes_listing_cached(force: bool = False) -> Dict[str, Dict[str, Dict[str, str]]]:
    """Return the cortes catalogue, reusing a recent cache to avoid heavy I/O.

    The cache is reused only while the directory's (st_mtime_ns, st_size)
    signature is unchanged AND the TTL has not elapsed; pass ``force=True``
    to rebuild unconditionally. NOTE(review): the cached dict is returned
    without copying, so callers must treat it as read-only; after expiry two
    threads may rebuild concurrently (last writer wins) — apparently an
    accepted trade-off to keep the lock out of build_cortes_listing().
    """
    signature: Optional[Tuple[int, int]] = None
    if CORTES_DIR.exists():
        try:
            stat = CORTES_DIR.stat()
            signature = (stat.st_mtime_ns, stat.st_size)
        except OSError:
            signature = None
    now = time.time()
    with _CORTES_CACHE_LOCK:
        cached_data = _CORTES_CACHE["data"]
        if (
            not force
            and cached_data is not None
            and _CORTES_CACHE["signature"] == signature
            and now < _CORTES_CACHE["expires"]
        ):
            return cached_data
    # Build outside the lock so slow directory scans don't block readers.
    entries = build_cortes_listing()
    ttl = _CORTES_CACHE_TTL if _CORTES_CACHE_TTL > 0 else 1.0
    with _CORTES_CACHE_LOCK:
        _CORTES_CACHE["data"] = entries
        _CORTES_CACHE["signature"] = signature
        _CORTES_CACHE["expires"] = now + ttl
    return entries


def invalidate_cortes_cache() -> None:
    """Reset the cortes listing cache so the next read rebuilds it from disk."""
    with _CORTES_CACHE_LOCK:
        _CORTES_CACHE.update(data=None, signature=None, expires=0.0)


def _load_highlight_metadata(base_key: str) -> Tuple[Path, Dict[str, Any]]:
    """Load and validate the ``<base_key>_data.json`` metadata for a cut.

    Returns the metadata path and the parsed dict. Raises FileNotFoundError
    when the file is absent, and RuntimeError on read/parse failures or a
    non-dict payload.
    """
    meta_path = (CORTES_DIR / f"{base_key}_data.json").resolve()
    if not meta_path.exists():
        raise FileNotFoundError("Dados do corte nao encontrados para reprocessar o titulo.")
    try:
        parsed = json.loads(meta_path.read_text(encoding="utf-8") or "{}")
    except (OSError, json.JSONDecodeError) as exc:
        raise RuntimeError(f"Falha ao ler metadata do corte: {exc}") from exc
    if not isinstance(parsed, dict):
        raise RuntimeError("Metadata do corte esta em formato inesperado.")
    return meta_path, parsed


def _resolve_video_source(meta: Dict[str, Any]) -> Path:
    candidates: List[Path] = []
    raw_path = meta.get("input_path")
    if isinstance(raw_path, str) and raw_path.strip():
        candidates.append(Path(raw_path).expanduser())
    input_name = meta.get("input_name")
    if isinstance(input_name, str) and input_name.strip():
        base_name = Path(input_name.strip()).name
        candidates.append((INBOX_DIR / base_name).expanduser())
        candidates.append((ROOT_DIR / base_name).expanduser())
        candidates.append(Path(base_name).expanduser())
    for candidate in candidates:
        if candidate.exists():
            return candidate
    raise FileNotFoundError("Video original nao foi encontrado para reprocessar o titulo do corte.")


def _build_highlight_from_meta(meta: Dict[str, Any], new_title: str) -> HighlightCandidate:
    """Rebuild a HighlightCandidate from stored metadata, swapping in *new_title*.

    Non-dict segment entries are ignored; raises RuntimeError when no usable
    segments remain. The crop window is reconstructed only when the metadata
    carries a dict under "crop".
    """
    raw_segments = meta.get("segments") or []
    if not raw_segments:
        raise RuntimeError("Metadata do corte nao contem segmentos para reconstruir o titulo.")
    rebuilt: List[Segment] = [
        Segment(
            start=float(item.get("start", 0.0)),
            end=float(item.get("end", 0.0)),
            text=str(item.get("text", "")),
            avg_logprob=float(item.get("avg_logprob", 0.0)),
            no_speech_prob=float(item.get("no_speech_prob", 0.0)),
        )
        for item in raw_segments
        if isinstance(item, dict)
    ]
    if not rebuilt:
        raise RuntimeError("Nao ha segmentos suficientes para reconstruir o titulo.")
    crop_meta = meta.get("crop")
    crop_window: Optional[CropWindow] = None
    if isinstance(crop_meta, dict):
        crop_window = CropWindow(
            x=int(crop_meta.get("x", 0)),
            y=int(crop_meta.get("y", 0)),
            width=int(crop_meta.get("width", 0)),
            height=int(crop_meta.get("height", 0)),
            confidence=float(crop_meta.get("confidence", 0.0)),
            detections=int(crop_meta.get("detections", 0)),
            samples=int(crop_meta.get("samples", 0)),
        )
    return HighlightCandidate(
        start=float(meta.get("start", 0.0)),
        end=float(meta.get("end", 0.0)),
        score=float(meta.get("score", 0.0)),
        segments=rebuilt,
        reason=str(meta.get("reason", "")),
        title=new_title,
        origin=str(meta.get("origin", "")),
        crop=crop_window,
    )


def regenerate_cut_with_new_title(base_key: str, new_title: str) -> str:
    """Re-render a cut's subtitles and video variants with *new_title*.

    Loads the stored metadata for *base_key*, rebuilds the highlight,
    regenerates the subtitle files, and re-exports whichever variants
    ("zoom"/"fit") were generated originally. The metadata file and the
    ``*_title.txt`` sidecar are then updated; filesystem failures there are
    logged to stdout rather than raised. Returns the applied title.
    """
    meta_path, meta = _load_highlight_metadata(base_key)
    video_source = _resolve_video_source(meta)
    highlight = _build_highlight_from_meta(meta, new_title)
    output_dir = Path(meta.get("output_dir") or CORTES_DIR).expanduser()
    output_dir.mkdir(parents=True, exist_ok=True)
    base_name = meta.get("base_name") or base_key
    ass_fill_path, ass_fit_path, _, fit_title_path = prepare_highlight_subtitles(
        highlight, output_dir, base_name
    )
    # Re-export only the variants that were produced originally (zoom defaults on).
    generated_formats = meta.get("generated_formats") or {}
    zoom_requested = bool(generated_formats.get("zoom", True))
    fit_requested = bool(generated_formats.get("fit", False))
    if zoom_requested:
        export_highlight_variant(video_source, highlight, output_dir, base_name, "zoom", ass_fill_path, "fill")
    if fit_requested:
        export_highlight_variant(
            video_source,
            highlight,
            output_dir,
            base_name,
            "fit",
            ass_fit_path,
            "fit",
            title_ass_path=fit_title_path,
        )
    meta["title"] = new_title
    meta["updated_at"] = isoformat_sao_paulo()
    try:
        meta_path.write_text(json.dumps(meta, ensure_ascii=False, indent=2) + "\n", encoding="utf-8")
    except OSError as exc:
        print(f"[titulo] Falha ao atualizar metadata do corte: {exc}")

    title_path = output_dir / f"{base_name}_title.txt"
    try:
        title_path.write_text(json.dumps({"title": new_title}, ensure_ascii=False) + "\n", encoding="utf-8")
    except OSError as exc:
        print(f"[titulo] Falha ao salvar titulo atualizado: {exc}")
    else:
        # Drop the stale cached copy of the title file and invalidate the
        # listing cache so the UI shows the new title immediately.
        with _CORTES_TEXT_CACHE_LOCK:
            _CORTES_TEXT_CACHE.pop(str(title_path), None)
        invalidate_cortes_cache()

    return new_title


def enqueue_title_job(base_key: str, new_title: str) -> str:
    """Register a title-regeneration job and run it on a daemon thread.

    Returns the generated job id, which can be polled via get_title_job().
    """
    job_id = str(uuid.uuid4())
    record = {
        "job_id": job_id,
        "base_key": base_key,
        "title": new_title,
        "status": "queued",
        "error": "",
        "started_at": None,
        "finished_at": None,
    }
    with _TITLE_JOBS_LOCK:
        _TITLE_JOBS[job_id] = record
    worker = threading.Thread(
        target=_run_title_job,
        args=(job_id, base_key, new_title),
        daemon=True,
    )
    worker.start()
    return job_id


def _run_title_job(job_id: str, base_key: str, new_title: str) -> None:
    """Worker body: regenerate the cut, then record the outcome on the job."""
    with _TITLE_JOBS_LOCK:
        record = _TITLE_JOBS.get(job_id)
        if record:
            record["status"] = "running"
            record["started_at"] = isoformat_sao_paulo()
    try:
        applied_title = regenerate_cut_with_new_title(base_key, new_title)
    except Exception as exc:
        outcome = {
            "status": "failed",
            "error": str(exc),
            "finished_at": isoformat_sao_paulo(),
        }
    else:
        outcome = {
            "status": "completed",
            "title": applied_title,
            "finished_at": isoformat_sao_paulo(),
            "error": "",
        }
    with _TITLE_JOBS_LOCK:
        record = _TITLE_JOBS.get(job_id)
        if record:
            record.update(outcome)


def get_title_job(job_id: str) -> Optional[Dict[str, Any]]:
    """Return a snapshot copy of a title job's state, or None when unknown."""
    with _TITLE_JOBS_LOCK:
        found = _TITLE_JOBS.get(job_id)
        return dict(found) if found else None


def resolve_corte_file(filename: str) -> Path:
    """Resolve *filename* to an existing regular file inside CORTES_DIR.

    Only the final path component is used, and the resolved target must stay
    under the cortes folder (path-traversal guard). Raises ValueError for
    empty/escaping names, FileNotFoundError when missing or not a file.
    """
    leaf = Path(str(filename or "")).name
    if not leaf:
        raise ValueError("Nome de arquivo invalido")
    resolved = (CORTES_DIR / leaf).resolve()
    try:
        resolved.relative_to(CORTES_DIR.resolve())
    except ValueError as exc:
        raise ValueError("Arquivo fora da pasta cortes") from exc
    if not resolved.exists():
        raise FileNotFoundError(f"Arquivo nao encontrado: {leaf}")
    if not resolved.is_file():
        raise FileNotFoundError(f"Entrada nao eh arquivo: {leaf}")
    return resolved


def ensure_tiktok_uploader() -> Any:
    """Lazily import and memoize tiktok_uploader's upload_video callable.

    Only one import attempt is ever made; calls after a failed attempt
    re-raise a RuntimeError chained to the original import error.
    """
    global TikTokUploadVideo, _TIKTOK_IMPORT_ATTEMPTED, _TIKTOK_IMPORT_ERROR
    if TikTokUploadVideo is not None:
        return TikTokUploadVideo
    failure_message = (
        "Dependencias do TikTok nao instaladas. Execute 'pip install tiktok-uploader' "
        "e garanta que o Chromedriver esteja acessivel."
    )
    if _TIKTOK_IMPORT_ATTEMPTED:
        raise RuntimeError(failure_message) from _TIKTOK_IMPORT_ERROR
    _TIKTOK_IMPORT_ATTEMPTED = True
    try:
        from tiktok_uploader.upload import upload_video as uploader  # type: ignore
    except Exception as exc:  # pragma: no cover - optional dependency
        _TIKTOK_IMPORT_ERROR = exc
        raise RuntimeError(failure_message) from exc
    TikTokUploadVideo = uploader
    return TikTokUploadVideo


def ensure_instagram_client() -> Any:
    """Lazily import and memoize instagrapi's Client class.

    Only one import attempt is ever made; calls after a failed attempt
    re-raise a RuntimeError chained to the original import error.
    """
    global InstagramClient, _INSTAGRAM_IMPORT_ATTEMPTED, _INSTAGRAM_IMPORT_ERROR
    if InstagramClient is not None:
        return InstagramClient
    error_text = "Instagrapi nao instalada. Execute 'pip install instagrapi'."
    if _INSTAGRAM_IMPORT_ATTEMPTED:
        raise RuntimeError(error_text) from _INSTAGRAM_IMPORT_ERROR
    _INSTAGRAM_IMPORT_ATTEMPTED = True
    try:
        from instagrapi import Client as client_cls  # type: ignore
    except Exception as exc:  # pragma: no cover - optional dependency
        _INSTAGRAM_IMPORT_ERROR = exc
        raise RuntimeError(error_text) from exc
    InstagramClient = client_cls
    return InstagramClient


def _invalidate_instagram_session() -> None:
    """Forget the cached Instagram client and best-effort delete its session file."""
    with _INSTAGRAM_CLIENT_LOCK:
        _INSTAGRAM_CLIENT_STATE["client"] = None
    try:
        INSTAGRAM_SESSION_PATH.unlink(missing_ok=True)
    except Exception:
        # Best-effort cleanup: a failing filesystem must not break the caller.
        pass


def get_instagram_client(force_refresh: bool = False) -> Any:
    """Return a logged-in instagrapi client, cached per process.

    Reuses the cached client unless *force_refresh* is True. When a saved
    session file exists its settings are loaded first (and the file deleted
    if unusable); after login the settings are re-persisted best-effort.
    Raises RuntimeError when credentials are not configured.
    NOTE(review): the network login runs while _INSTAGRAM_CLIENT_LOCK is
    held, so concurrent callers block for the whole login duration.
    """
    if not INSTAGRAM_USERNAME or not INSTAGRAM_PASSWORD:
        raise RuntimeError("Credenciais do Instagram nao configuradas.")
    client_cls = ensure_instagram_client()
    with _INSTAGRAM_CLIENT_LOCK:
        cached = _INSTAGRAM_CLIENT_STATE.get("client")
        if cached is not None and not force_refresh:
            return cached
        client = client_cls()
        if INSTAGRAM_SESSION_PATH.exists():
            try:
                settings = json.loads(INSTAGRAM_SESSION_PATH.read_text(encoding="utf-8"))
                client.set_settings(settings)
            except Exception:
                # Corrupt session file: discard it and fall back to fresh login.
                INSTAGRAM_SESSION_PATH.unlink(missing_ok=True)
        client.delay_range = (1, 3)
        client.login(INSTAGRAM_USERNAME, INSTAGRAM_PASSWORD)
        try:
            INSTAGRAM_SESSION_PATH.write_text(
                json.dumps(client.get_settings()), encoding="utf-8"
            )
        except Exception:
            # Best-effort persistence; next start simply logs in again.
            pass
        _INSTAGRAM_CLIENT_STATE["client"] = client
        return client


def post_to_tiktok(video_path: Path, caption: str) -> str:
    """Upload *video_path* to TikTok via tiktok-uploader, retrying once.

    Cookies from ``tiktok_cookies.txt`` are passed when the file exists. A
    single retry is attempted only for errors that look transient: "stale
    element" in the message, or a Selenium StaleElementReferenceException
    when that class could be imported. Raises RuntimeError wrapping the last
    failure.
    """
    uploader = ensure_tiktok_uploader()
    if not TIKTOK_USERNAME or not TIKTOK_PASSWORD:
        raise RuntimeError("Credenciais do TikTok nao configuradas.")
    cookies_path = ROOT_DIR / "tiktok_cookies.txt"
    cookies_arg = str(cookies_path) if cookies_path.exists() else ""
    last_error: Optional[Exception] = None
    for attempt in range(2):
        try:
            result = uploader(
                filename=str(video_path),
                description=caption or "",
                username=TIKTOK_USERNAME,
                password=TIKTOK_PASSWORD,
                cookies=cookies_arg,
            )
            return str(result or "ok")
        except Exception as exc:  # pragma: no cover - external dependency
            last_error = exc
            message = str(exc).lower()
            # Retry only transient Selenium staleness; anything else fails fast.
            recoverable = "stale element" in message or (
                StaleElementReferenceException
                and isinstance(exc, StaleElementReferenceException)
            )
            if attempt == 0 and recoverable:
                time.sleep(3)
                continue
            break
    raise RuntimeError(f"Falha ao postar no TikTok: {last_error}") from last_error


def post_to_instagram(video_path: Path, caption: str) -> str:
    """Publish *video_path* to Instagram, trying clip_upload then video_upload.

    Makes up to two attempts; the second forces a fresh login. Within an
    attempt, clip_upload (reels) is tried first and video_upload as a
    fallback; when both fail the cached session is invalidated before the
    next attempt. Raises RuntimeError wrapping the last error.
    """
    last_error: Optional[Exception] = None
    for attempt in range(2):
        force = attempt > 0  # second attempt bypasses the cached client
        try:
            client = get_instagram_client(force_refresh=force)
        except Exception as exc:
            last_error = exc
            print(f"[instagram] login tentativa {attempt + 1}/2 falhou: {exc}")
            time.sleep(3)
            continue
        try:
            upload_result = client.clip_upload(str(video_path), caption or "")
            return str(upload_result or "ok")
        except Exception as clip_exc:
            last_error = clip_exc
            print(f"[instagram] clip_upload tentativa {attempt + 1}/2 falhou: {clip_exc}")
            try:
                upload_result = client.video_upload(str(video_path), caption or "")
                return str(upload_result or "ok")
            except Exception as video_exc:
                last_error = video_exc
                print(f"[instagram] video_upload fallback tentativa {attempt + 1}/2 falhou: {video_exc}")
                # Both upload paths failed: assume the session is bad.
                _invalidate_instagram_session()
                time.sleep(3)
    raise RuntimeError(f"Falha ao postar no Instagram: {last_error}") from last_error


def locate_portal_file(video_name: str, hint: Optional[str] = None) -> Optional[Path]:
    """Find the portal article file for a video.

    Resolution order: the explicit *hint* filename, then the exact
    ``<base>_portal.txt`` name, then the first ``<base>*_portal.txt`` glob
    match under CORTES_DIR. Returns None when nothing is found.
    """
    if hint:
        try:
            return resolve_corte_file(hint)
        except Exception:
            pass  # fall through to name-based lookup
    base = normalize_base_name(video_name)
    if not base:
        return None
    try:
        return resolve_corte_file(f"{base}_portal.txt")
    except Exception:
        pass
    for match in CORTES_DIR.glob(f"{base}*_portal.txt"):
        if match.is_file():
            return match
    return None


def load_portal_article(article_path: Path) -> Dict[str, str]:
    """Split an article file into its title and body.

    The first non-blank line is the title; everything after it, stripped of
    surrounding whitespace, is the body. A file with no non-blank lines
    falls back to the filename stem as title and an empty body.
    """
    text = article_path.read_text(encoding="utf-8").replace("\r\n", "\n")
    lines = text.split("\n")
    for position, line in enumerate(lines):
        if line.strip():
            return {
                "title": line.strip(),
                "body": "\n".join(lines[position + 1 :]).strip(),
            }
    return {"title": article_path.stem, "body": ""}


def _portal_html_from_text(body: str) -> str:
    content = body.strip()
    if not content:
        return "<p></p>"
    blocks = re.split(r"\n\s*\n", content)
    parts: List[str] = []
    for block in blocks:
        block = block.strip()
        if not block:
            continue
        lines = [html.escape(line.strip()) for line in block.split("\n") if line.strip()]
        paragraph = "<br/>".join(lines) or html.escape(block)
        parts.append(f"<p>{paragraph}</p>")
    return "".join(parts) or f"<p>{html.escape(content)}</p>"


def post_portal_article(title: str, body: str) -> Dict[str, Any]:
    """Create a WordPress post on the portal via its REST API.

    Sends a Basic-auth POST to PORTAL_API_URL containing the title, the body
    converted to HTML paragraphs, and PORTAL_STATUS (default "draft").
    Returns the decoded JSON response, or ``{"raw": ...}`` when the response
    is not JSON. Raises RuntimeError for missing configuration, HTTP errors,
    or connectivity failures.
    """
    if not PORTAL_API_URL:
        raise RuntimeError("PORTAL_API_URL nao configurada.")
    if not PORTAL_USERNAME or not PORTAL_PASSWORD:
        raise RuntimeError("Credenciais do portal nao configuradas.")
    payload = {
        "title": title.strip() or "Materia automatica",
        "content": _portal_html_from_text(body),
        "status": (PORTAL_STATUS or "draft").lower(),
    }
    data = json.dumps(payload, ensure_ascii=False).encode("utf-8")
    raw_credentials = f"{PORTAL_USERNAME}:{PORTAL_PASSWORD}"
    # NOTE(review): errors="ignore" silently drops non-ASCII characters from
    # the credentials before Basic-auth encoding — confirm that is intended.
    token = base64.b64encode(raw_credentials.encode("ascii", "ignore")).decode("ascii")
    headers = {
        "Authorization": f"Basic {token}",
        "Content-Type": "application/json",
    }
    request = urllib.request.Request(PORTAL_API_URL, data=data, headers=headers, method="POST")
    try:
        with urllib.request.urlopen(request, timeout=30) as response:
            body_bytes = response.read()
            try:
                return json.loads(body_bytes.decode("utf-8"))
            except json.JSONDecodeError:
                return {"raw": body_bytes.decode("utf-8", errors="ignore")}
    except urllib.error.HTTPError as exc:
        detail = exc.read().decode("utf-8", errors="ignore").strip()
        message = detail or exc.reason or f"HTTP {exc.code}"
        raise RuntimeError(f"Portal HTTP {exc.code}: {message}") from exc
    except urllib.error.URLError as exc:
        raise RuntimeError(f"Portal indisponivel: {exc}") from exc


def build_cli_args(payload: Dict[str, Any]) -> List[str]:
    """Translate a dashboard request payload into CLI arguments for main.py.

    When ``input`` is absent the pipeline runs in batch mode over the whole
    inbox. The variant defaults to ``"zoom"``; unknown variants are dropped
    silently (no ``--variant`` flag is emitted).
    """
    cli: List[str] = [os.fspath(ROOT_DIR / "main.py")]
    source = str(payload.get("input", "") or "").strip()
    if source:
        cli.extend(["--input", source])
    else:
        cli.append("--batch-all")
    # Simple pass-through flags that precede the variant selection.
    for key, flag in (("output_dir", "--output-dir"), ("top", "--top")):
        if payload.get(key):
            cli.extend([flag, str(payload[key])])
    chosen = str(payload.get("variant", "") or "").strip().lower() or "zoom"
    if chosen in {"zoom", "fit", "both"}:
        cli.extend(["--variant", chosen])
    # Remaining pass-through flags, in the order main.py expects them.
    for key, flag in (
        ("model", "--model"),
        ("min_duration", "--min-duration"),
        ("max_duration", "--max-duration"),
        ("lang", "--lang"),
    ):
        if payload.get(key):
            cli.extend([flag, str(payload[key])])
    return cli

def launch_job(job_id: str, cli_args: List[str], prompt_env: Dict[str, str], user: str) -> None:
    """Run the pipeline subprocess for ``job_id`` and stream its output.

    Executes ``main.py`` via the current interpreter with ``cli_args``,
    merging ``prompt_env`` into the environment. Every stdout line is
    mirrored into the job log (and this server's console), and the final
    status/return code is recorded in JOB_STATUS under JOB_LOCK. Designed
    to run on a daemon worker thread; it never raises.
    """
    env = os.environ.copy()
    if prompt_env:
        env.update(prompt_env)
    env.setdefault("PYTHONUNBUFFERED", "1")
    env.setdefault("PYTHONIOENCODING", "utf-8")
    env["CORTE_REQUEST_USER"] = user
    command = [os.fspath(Path(sys.executable))] + cli_args
    append_job_log(job_id, "== Preparando ambiente e iniciando pipeline ==")
    append_job_log(job_id, f"$ {' '.join(command)}")
    process = None
    try:
        # stderr is folded into stdout so a single stream feeds the job log.
        process = subprocess.Popen(
            command,
            cwd=str(ROOT_DIR),
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            bufsize=1,  # line-buffered so lines reach the log as they appear
            encoding="utf-8",
            errors="replace",
        )
        with JOB_LOCK:
            entry = JOB_STATUS.setdefault(job_id, {"status": "running", "logs": []})
            entry["status"] = "running"
            entry["returncode"] = None
            entry["pid"] = process.pid
        if process.stdout:
            for raw_line in process.stdout:
                append_job_log(job_id, raw_line)
                try:
                    # Mirror to the server console; swallow console encoding errors.
                    print(raw_line, end="", flush=True)
                except Exception:
                    pass
        returncode = process.wait()
        status = "completed" if returncode == 0 else "failed"
        append_job_log(job_id, f"[status] Processo finalizado com codigo {returncode}")
        try:
            print(f"[job {job_id}] finalizado com codigo {returncode}")
        except Exception:
            pass
        with JOB_LOCK:
            entry = JOB_STATUS.setdefault(job_id, {"logs": []})
            entry["status"] = status
            entry["returncode"] = returncode
            entry["user"] = user
        if status == "completed":
            increment_user_stat(user, "jobs_completed", 1)
    except Exception as exc:  # pragma: no cover - defensive
        # Any failure (spawn error, I/O error) marks the job failed instead
        # of killing the worker thread.
        append_job_log(job_id, f"[erro] {exc}")
        with JOB_LOCK:
            entry = JOB_STATUS.setdefault(job_id, {"logs": []})
            entry["status"] = "failed"
            entry["error"] = str(exc)
        append_job_log(job_id, "[status] Processo finalizado com erro.")
        try:
            print(f"[job {job_id}] erro: {exc}")
        except Exception:
            pass
    finally:
        if process and process.stdout:
            try:
                process.stdout.close()
            except Exception:
                pass


class DashboardHandler(SimpleHTTPRequestHandler):
    """HTTP handler for the dashboard: static files plus a JSON API.

    Serves static assets from ROOT_DIR and exposes endpoints for
    authentication, user administration, pipeline job control, cut
    management and social/portal publishing. Authentication uses bearer
    tokens issued by ``issue_token`` and checked by ``validate_token``
    (defined elsewhere in this module).
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Serve static files from the project root directory.
        super().__init__(*args, directory=str(ROOT_DIR), **kwargs)

    def log_message(self, *args: Any, **kwargs: Any) -> None:  # type: ignore[override]
        # Silence the default per-request stderr logging.
        return

    def end_headers(self) -> None:  # type: ignore[override]
        # Disable client-side caching on every response (API and static).
        self.send_header("Cache-Control", "no-store")
        super().end_headers()

    def send_json(self, status: int, payload: Dict[str, Any]) -> None:
        """Serialize ``payload`` as UTF-8 JSON and send it with ``status``."""
        data = json.dumps(payload, ensure_ascii=False).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json; charset=utf-8")
        self.send_header("Content-Length", str(len(data)))
        self.end_headers()
        self.wfile.write(data)

    def get_authenticated_user(self) -> Optional[str]:
        """Return the username for a valid ``Authorization: Bearer`` header, else None."""
        auth_header = self.headers.get("Authorization", "")
        if auth_header and auth_header.lower().startswith("bearer "):
            token = auth_header.split(" ", 1)[1].strip()
            return validate_token(token)
        return None

    def require_auth(self) -> Optional[str]:
        """Like get_authenticated_user, but reply 401 when unauthenticated."""
        user = self.get_authenticated_user()
        if not user:
            self.send_json(HTTPStatus.UNAUTHORIZED, {"error": "Nao autenticado"})
        return user

    def handle_login(self, payload: Dict[str, Any]) -> None:
        """POST /auth/login: verify credentials and issue a bearer token."""
        username = str(payload.get("username") or "").strip()
        password = str(payload.get("password") or "")
        if not username or not password:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario e senha obrigatorios"})
            return
        if not verify_credentials(username, password):
            self.send_json(HTTPStatus.UNAUTHORIZED, {"error": "Credenciais invalidas"})
            return
        token = issue_token(username)
        self.send_json(
            HTTPStatus.OK,
            {"token": token, "username": username, "expires_in": TOKEN_TTL_SECONDS},
        )

    def handle_add_user(self, payload: Dict[str, Any], requester: str) -> None:
        """POST /auth/add-user: create a new account (admins only)."""
        if not is_admin(requester):
            self.send_json(HTTPStatus.FORBIDDEN, {"error": "Apenas admins podem criar contas."})
            return
        username = str(payload.get("username") or "").strip()
        password = str(payload.get("password") or "")
        if not username or not password:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario e senha obrigatorios"})
            return
        users = load_users()
        if username in users:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario ja existe"})
            return
        users[username] = hash_password(password)
        save_users(users)
        self.send_json(HTTPStatus.OK, {"status": "ok", "created": username, "created_by": requester})

    def handle_delete_user(self, payload: Dict[str, Any], requester: str) -> None:
        """POST /auth/delete-user: remove an account (master admin only)."""
        if requester != MASTER_USER:
            self.send_json(HTTPStatus.FORBIDDEN, {"error": "Apenas o admin master pode remover contas."})
            return
        username = str(payload.get("username") or "").strip()
        if not username:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario obrigatorio"})
            return
        if username == MASTER_USER:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Nao e permitido remover o admin master"})
            return
        users = load_users()
        if username not in users:
            self.send_json(HTTPStatus.NOT_FOUND, {"error": "Usuario nao encontrado"})
            return
        users.pop(username, None)
        save_users(users)
        # Optional cleanup: drop the removed user's stats.
        stats = load_user_stats()
        if isinstance(stats, dict) and username in stats:
            stats.pop(username, None)
            save_user_stats(stats)
        # If the user was a regular admin, drop them from the admin list.
        admins = load_admins()
        if username in admins:
            admins.remove(username)
            save_admins(admins)
        # Invalidate any outstanding tokens of the removed user.
        cleanup_tokens()
        for token, info in list(AUTH_TOKENS.items()):
            if info.get("user") == username:
                AUTH_TOKENS.pop(token, None)
        self.send_json(HTTPStatus.OK, {"status": "ok", "removed": username})

    def handle_promote_user(self, payload: Dict[str, Any], requester: str) -> None:
        """POST /auth/promote-user: grant admin rights (master admin only)."""
        if requester != MASTER_USER:
            self.send_json(HTTPStatus.FORBIDDEN, {"error": "Apenas o admin master pode promover usuarios."})
            return
        username = str(payload.get("username") or "").strip()
        if not username:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario obrigatorio"})
            return
        users = load_users()
        if username not in users:
            self.send_json(HTTPStatus.NOT_FOUND, {"error": "Usuario nao encontrado"})
            return
        admins = load_admins()
        if username in admins:
            # Idempotent: promoting an admin again is reported, not an error.
            self.send_json(HTTPStatus.OK, {"status": "ok", "admin": username, "already_admin": True})
            return
        admins.append(username)
        save_admins(admins)
        self.send_json(HTTPStatus.OK, {"status": "ok", "admin": username})

    def handle_title_job_status(self, job_id: str) -> None:
        """GET /cuts/title-job-status/<id>: report a title-rendering job's state."""
        if not job_id:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Job_id obrigatorio"})
            return
        info = get_title_job(job_id)
        if not info:
            self.send_json(HTTPStatus.NOT_FOUND, {"error": "Job nao encontrado"})
            return
        self.send_json(HTTPStatus.OK, info)

    def handle_update_title(self, payload: Dict[str, Any], requester: str) -> None:
        """POST /cuts/update-title: queue a title change for a cut.

        ``requester`` is currently unused here (auth happens in do_POST).
        """
        base_key = str(payload.get("base_key") or "").strip()
        new_title = str(payload.get("title") or "").strip()
        if not base_key:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Identificador do corte obrigatorio"})
            return
        # Reject path separators / traversal in the cut identifier.
        if any(sep in base_key for sep in ("/", "\\")) or ".." in base_key:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Identificador de corte invalido"})
            return
        sanitized_title = force_uppercase_title(sanitise_title_text(new_title))
        if not sanitized_title:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Titulo nao pode ser vazio"})
            return
        job_id = enqueue_title_job(base_key, sanitized_title)
        self.send_json(HTTPStatus.ACCEPTED, {"status": "queued", "job_id": job_id})

    def handle_demote_user(self, payload: Dict[str, Any], requester: str) -> None:
        """POST /auth/demote-user: revoke admin rights (master admin only)."""
        if requester != MASTER_USER:
            self.send_json(HTTPStatus.FORBIDDEN, {"error": "Apenas o admin master pode remover privilegios de admin."})
            return
        username = str(payload.get("username") or "").strip()
        if not username:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Usuario obrigatorio"})
            return
        if username == MASTER_USER:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Nao e permitido alterar o admin master"})
            return
        admins = load_admins()
        if username not in admins:
            # Idempotent: demoting a non-admin is reported, not an error.
            self.send_json(HTTPStatus.OK, {"status": "ok", "admin": username, "already_demoted": True})
            return
        admins = [u for u in admins if u != username]
        save_admins(admins)
        self.send_json(HTTPStatus.OK, {"status": "ok", "demoted": username})

    def handle_list_users(self, requester: str) -> None:
        """GET /auth/users: list all accounts with stats and roles (admins only)."""
        if not is_admin(requester):
            self.send_json(HTTPStatus.FORBIDDEN, {"error": "Apenas admins podem listar usuarios."})
            return
        users = load_users()
        stats = load_user_stats()
        admins = set(load_admins())
        payload = []
        for username in sorted(users.keys()):
            entry = {
                "username": username,
                "is_admin": username == MASTER_USER or username in admins,
                "is_master": username == MASTER_USER,
                "stats": stats.get(username, {}),
            }
            payload.append(entry)
        self.send_json(HTTPStatus.OK, {"users": payload})

    def do_GET(self) -> None:  # type: ignore[override]
        """Route GET requests: JSON API endpoints first, then static files."""
        raw_path = self.path.split('?', 1)[0]
        path_only = raw_path.rstrip('/') or '/'
        # Normalise dynamic sub-paths to their canonical endpoint.
        if raw_path.startswith("/auth/users"):
            path_only = "/auth/users"
        if raw_path.startswith("/auth/me"):
            path_only = "/auth/me"

        protected_gets = {'/jobs', '/api/cortes-listing', '/cortes-listing', '/auth/me', '/auth/users', '/api/input-files'}
        if path_only in protected_gets and not self.require_auth():
            return

        if path_only == '/auth/me':
            user = self.require_auth()
            if not user:
                return
            stats = load_user_stats()
            self.send_json(
                HTTPStatus.OK,
                {
                    "username": user,
                    "stats": stats.get(user, {}),
                    "is_admin": is_admin(user),
                    "is_master": user == MASTER_USER,
                },
            )
            return

        if path_only == "/auth/users":
            user = self.require_auth()
            if not user:
                return
            self.handle_list_users(user)
            return

        if path_only == '/jobs':
            with JOB_LOCK:
                # NOTE(review): shallow copy — the inner job dicts are still
                # shared with worker threads after the lock is released.
                payload = {job_id: info for job_id, info in JOB_STATUS.items()}
            self.send_json(HTTPStatus.OK, payload)
            return

        if path_only.startswith("/cuts/title-job-status/"):
            job_id = path_only.rsplit("/", 1)[-1]
            self.handle_title_job_status(job_id)
            return

        if path_only in ('/api/cortes-listing', '/cortes-listing'):
            entries = get_cortes_listing_cached()
            self.send_json(HTTPStatus.OK, {'entries': entries})
            return
        if path_only == "/api/input-files":
            entries = list_input_files()
            self.send_json(HTTPStatus.OK, {"files": entries})
            return

        # Anything else falls through to static file serving.
        try:
            super().do_GET()
        except (BrokenPipeError, ConnectionResetError, ConnectionAbortedError):
            pass


    def do_POST(self) -> None:  # type: ignore[override]
        """Route POST requests: parse the JSON body, dispatch by path.

        NOTE(review): unlike do_GET, the query string is not stripped from
        the path here — confirm POST clients never append one.
        """
        path_only = self.path.rstrip("/")
        length = int(self.headers.get("Content-Length", "0") or 0)
        raw_body = self.rfile.read(length) if length else b"{}"
        try:
            payload = json.loads(raw_body.decode("utf-8") or "{}")
            if not isinstance(payload, dict):
                raise ValueError("JSON deve ser um objeto")
        except ValueError as exc:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": str(exc)})
            return

        if path_only == "/auth/login":
            self.handle_login(payload)
            return

        if path_only == "/auth/add-user":
            user = self.require_auth()
            if not user:
                return
            self.handle_add_user(payload, user)
            return

        if path_only == "/auth/delete-user":
            user = self.require_auth()
            if not user:
                return
            self.handle_delete_user(payload, user)
            return

        if path_only == "/auth/promote-user":
            user = self.require_auth()
            if not user:
                return
            self.handle_promote_user(payload, user)
            return

        if path_only == "/auth/demote-user":
            user = self.require_auth()
            if not user:
                return
            self.handle_demote_user(payload, user)
            return

        if path_only == "/cuts/update-title":
            user = self.require_auth()
            if not user:
                return
            self.handle_update_title(payload, user)
            return

        if path_only == "/auth/me":
            user = self.require_auth()
            if not user:
                return
            stats = load_user_stats()
            self.send_json(
                HTTPStatus.OK,
                {"username": user, "stats": stats.get(user, {}), "is_admin": is_admin(user)},
            )
            return

        # Every endpoint below this point requires authentication.
        user = self.require_auth()
        if not user:
            return

        if path_only == "/post/social":
            self.handle_post_social(payload)
            return

        if path_only == "/post/portal":
            self.handle_post_portal(payload)
            return

        if path_only == "/delete-cut":
            files_value = payload.get("files")
            if not isinstance(files_value, list) or not files_value:
                self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Lista de arquivos obrigatoria"})
                return
            deleted: List[str] = []
            missing: List[str] = []
            errors: List[Dict[str, str]] = []
            cortes_root = CORTES_DIR.resolve()
            seen = set()
            for raw in files_value:
                if not isinstance(raw, str):
                    continue
                # Basename only, so clients cannot point outside CORTES_DIR.
                cleaned = Path(raw.strip()).name
                if not cleaned or cleaned in seen:
                    continue
                seen.add(cleaned)
                target = (CORTES_DIR / cleaned).resolve()
                try:
                    # Defense in depth: the resolved path must stay under cortes/.
                    target.relative_to(cortes_root)
                except ValueError:
                    errors.append({"file": cleaned, "error": "arquivo fora da pasta cortes"})
                    continue
                if not target.exists():
                    missing.append(cleaned)
                    continue
                if not target.is_file():
                    errors.append({"file": cleaned, "error": "alvo nao eh um arquivo"})
                    continue
                try:
                    target.unlink()
                    deleted.append(cleaned)
                except OSError as exc:  # pragma: no cover - seguro
                    errors.append({"file": cleaned, "error": str(exc)})
            self.send_json(HTTPStatus.OK, {"deleted": deleted, "missing": missing, "errors": errors})
            return

        if path_only not in ("/trigger", "/cortes", "/run"):
            self.send_json(HTTPStatus.NOT_FOUND, {"error": "Endpoint desconhecido"})
            return

        # /trigger, /cortes and /run all start a pipeline job.
        job_id = str(uuid.uuid4())
        cli_args = build_cli_args(payload)

        # Optional per-request prompt overrides, passed to the subprocess
        # via environment variables.
        prompt_env: Dict[str, str] = {}
        prompts_payload = payload.get("prompts")
        if isinstance(prompts_payload, dict):
            mapping = {
                "highlight_selection": "PROMPT_HIGHLIGHT_SELECTION",
                "transcript_cleanup": "PROMPT_TRANSCRIPT_CLEANUP",
                "social_caption": "PROMPT_SOCIAL_CAPTION",
                "title_generation": "PROMPT_TITLE_GENERATION",
                "portal_article": "PROMPT_PORTAL_ARTICLE",
            }
            for key, env_key in mapping.items():
                value = prompts_payload.get(key)
                if isinstance(value, str) and value.strip():
                    prompt_env[env_key] = value

        thread = threading.Thread(
            target=launch_job,
            args=(job_id, cli_args, prompt_env, user),
            daemon=True,
        )
        with JOB_LOCK:
            JOB_STATUS[job_id] = {"status": "running", "logs": [], "returncode": None, "user": user}
        append_job_log(job_id, "Job enfileirado. Preparando pipeline...")
        thread.start()

        try:
            self.send_json(HTTPStatus.ACCEPTED, {"job_id": job_id, "status": "queued"})
        except (BrokenPipeError, ConnectionResetError, ConnectionAbortedError):
            pass

    def handle_post_social(self, payload: Dict[str, Any]) -> None:
        """POST /post/social: publish a cut to social platforms.

        Posts the video to each requested platform (instagram/tiktok) and,
        when Instagram is among them, also posts the matching portal
        article. Partial success is reported as 207 Multi-Status.
        """
        video_name = str(payload.get("video") or "").strip()
        if not video_name:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Arquivo do video obrigatorio"})
            return
        try:
            video_path = resolve_corte_file(video_name)
        except Exception as exc:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": str(exc)})
            return

        caption = str(payload.get("caption") or "").strip()
        platforms_value = payload.get("platforms")
        if not isinstance(platforms_value, list) or not platforms_value:
            platforms_value = ["instagram"]
        platforms = [str(item or "").strip() or "instagram" for item in platforms_value]

        successes: List[str] = []
        errors: Dict[str, str] = {}
        responses: Dict[str, Any] = {}

        for platform in platforms:
            key = platform.lower()
            if key == "instagram":
                try:
                    result = post_to_instagram(video_path, caption)
                    successes.append("instagram")
                    responses["instagram"] = result
                except Exception as exc:
                    errors["instagram"] = str(exc)
            elif key == "tiktok":
                try:
                    result = post_to_tiktok(video_path, caption)
                    successes.append("tiktok")
                    responses["tiktok"] = str(result)
                except Exception as exc:
                    errors["tiktok"] = str(exc)
            else:
                errors[key or "plataforma"] = "Plataforma nao suportada"

        # Portal publishing piggybacks on Instagram posts only.
        portal_hint = str(payload.get("portal_file") or "").strip() or None
        portal_path: Optional[Path] = None
        portal_debug: Dict[str, Any] = {"hint": portal_hint}
        should_post_portal = "instagram" in [p.lower() for p in platforms]
        if should_post_portal:
            try:
                portal_path = locate_portal_file(video_name, portal_hint)
                if portal_path:
                    portal_debug["resolved"] = True
                    portal_debug["path"] = portal_path.name
            except Exception as exc:
                errors["portal"] = f"Falha ao localizar materia: {exc}"
                portal_debug["error"] = str(exc)
            if portal_path:
                try:
                    article = load_portal_article(portal_path)
                    portal_response = post_portal_article(article["title"], article["body"])
                    portal_summary = {
                        "id": portal_response.get("id"),
                        "status": portal_response.get("status"),
                        "link": portal_response.get("link"),
                    }
                    successes.append("portal")
                    responses["portal"] = portal_summary
                except Exception as exc:
                    errors["portal"] = str(exc)
                    portal_debug["error"] = str(exc)
            else:
                errors.setdefault("portal", "Arquivo de materia nao encontrado para este corte.")
                portal_debug.setdefault("error", "Arquivo de materia nao encontrado para este corte.")
        else:
            portal_debug["skipped"] = "ignored_for_platforms"

        timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds")
        log_entry = {
            "timestamp": timestamp,
            "video": video_path.name,
            "caption_preview": caption[:200],
            "platforms": platforms,
            "success": successes,
            "errors": errors,
            "responses": responses,
            "portal_debug": portal_debug,
        }
        append_post_log(log_entry)

        # 200 all good, 207 partial success, 400 nothing succeeded.
        status = HTTPStatus.OK
        if errors and successes:
            status = HTTPStatus.MULTI_STATUS
        elif errors and not successes:
            status = HTTPStatus.BAD_REQUEST

        # NOTE(review): rebinding `payload` shadows the request dict, which
        # is no longer needed at this point.
        payload = {
            "timestamp": timestamp,
            "video": video_path.name,
            "caption": caption,
            "platforms": platforms,
            "success": successes,
            "errors": errors,
            "portal": responses.get("portal"),
            "portal_debug": portal_debug,
        }
        self.send_json(status, payload)

    def handle_post_portal(self, payload: Dict[str, Any]) -> None:
        """POST /post/portal: publish only the portal article for a cut.

        The article file provides defaults; explicit ``title``/``body`` in
        the payload override them. Portal failures map to 502 Bad Gateway.
        """
        portal_name = str(payload.get("portal_file") or "").strip()
        if not portal_name:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Arquivo de materia obrigatorio"})
            return
        try:
            portal_path = resolve_corte_file(portal_name)
        except Exception as exc:
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": str(exc)})
            return

        explicit_title = str(payload.get("title") or "").strip()
        explicit_body = str(payload.get("body") or "").strip()
        try:
            article = load_portal_article(portal_path)
        except Exception as exc:
            self.send_json(HTTPStatus.INTERNAL_SERVER_ERROR, {"error": f"Falha ao ler materia: {exc}"})
            return

        final_title = explicit_title or article["title"]
        final_body = explicit_body or article["body"]
        if not final_body.strip():
            self.send_json(HTTPStatus.BAD_REQUEST, {"error": "Conteudo da materia vazio"})
            return

        timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds")
        log_entry = {
            "timestamp": timestamp,
            "mode": "portal_only",
            "portal_file": portal_path.name,
            "title": final_title,
            "body_chars": len(final_body),
        }
        try:
            portal_response = post_portal_article(final_title, final_body)
            log_entry["response"] = {
                "id": portal_response.get("id"),
                "status": portal_response.get("status"),
                "link": portal_response.get("link"),
            }
            log_entry["status"] = "success"
            append_post_log(log_entry)
            self.send_json(
                HTTPStatus.OK,
                {
                    "timestamp": timestamp,
                    "portal_file": portal_path.name,
                    "portal": log_entry["response"],
                },
            )
        except Exception as exc:
            log_entry["status"] = "error"
            log_entry["error"] = str(exc)
            append_post_log(log_entry)
            self.send_json(HTTPStatus.BAD_GATEWAY, {"error": str(exc)})


def main() -> None:
    """Start the dashboard HTTP server and block until interrupted.

    Host and port come from DASHBOARD_HOST/HOST and DASHBOARD_PORT/PORT;
    an unparsable port falls back to DEFAULT_ENDPOINT_PORT.
    """
    host = os.getenv("DASHBOARD_HOST") or os.getenv("HOST") or "0.0.0.0"
    raw_port = os.getenv("DASHBOARD_PORT") or os.getenv("PORT")
    port = DEFAULT_ENDPOINT_PORT
    if raw_port:
        try:
            port = int(raw_port)
        except ValueError:
            port = DEFAULT_ENDPOINT_PORT
    with ThreadingHTTPServer((host, port), DashboardHandler) as server:
        # Wildcard binds are shown as "localhost" for a clickable URL.
        shown_host = "localhost" if host in {"0.0.0.0", "::"} else host
        print(f"Dashboard server em http://{shown_host}:{port}/dashboard.html")
        print("Pressione Ctrl+C para encerrar.")
        try:
            server.serve_forever()
        except KeyboardInterrupt:
            print("\nEncerrando servidor...")


# Script entry point: run the dashboard server when executed directly.
if __name__ == "__main__":
    main()
