"""
Topic selection and semantic deduplication for content pipeline.
"""

import json
import logging
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Optional

from google import genai

from config.settings import get_settings
from database.models import (
    PublishedContentRepository,
    Topic,
    TopicRepository,
    TopicStatus,
)

logger = logging.getLogger(__name__)


class DupeVerdict(str, Enum):
    """Outcome of a semantic-deduplication check (str-valued so it serializes/logs cleanly)."""

    UNIQUE = "unique"                    # no significant overlap; proceed with generation
    PARTIAL_OVERLAP = "partial_overlap"  # some overlap, but a unique angle is possible
    DUPLICATE = "duplicate"              # too similar to existing content; skip this topic


@dataclass
class DupeResult:
    """Result of comparing a candidate topic against already-published articles."""

    verdict: DupeVerdict  # see DupeVerdict for the three possible outcomes
    reason: str  # explanation text (model-provided, or a local fallback message)
    overlap_score: float  # 0.0 to 1.0
    similar_articles: list[str]  # entries from the model's "similar_articles" field (empty on fallback)


def get_gemini_client() -> genai.Client:
    """Build a Gemini API client using the configured API key."""
    api_key = get_settings().gemini_api_key
    return genai.Client(api_key=api_key)


def get_next_topic() -> Optional[Topic]:
    """Return the highest-priority pending topic, or None if none is pending."""
    pending = TopicRepository.get_next_pending()
    return pending


def mark_status(
    topic_id: int,
    status: TopicStatus,
    reason: Optional[str] = None,
    article_id: Optional[int] = None,
) -> bool:
    """Persist a status transition for a topic; returns the repository's success flag."""
    updated = TopicRepository.update_status(topic_id, status, reason, article_id)
    return updated


def semantic_dedupe_check(topic: Topic) -> DupeResult:
    """
    Check if topic overlaps with published content using Gemini.

    Args:
        topic: Candidate topic whose title, keywords, and search intent are
            compared against every previously published article.

    Returns a DupeResult with verdict:
    - UNIQUE: No significant overlap, proceed with generation
    - PARTIAL_OVERLAP: Some overlap, but unique angle possible
    - DUPLICATE: Too similar, skip this topic

    Never raises: any failure of the LLM call is logged (with traceback)
    and reported as UNIQUE so a transient API error cannot stall the pipeline.
    """
    published = PublishedContentRepository.get_all()

    if not published:
        logger.info("No published content to check against")
        return DupeResult(
            verdict=DupeVerdict.UNIQUE,
            reason="No existing content in database",
            overlap_score=0.0,
            similar_articles=[],
        )

    # Build prompt context of existing articles. The labels are German
    # ("ARTIKEL", "Titel", ...) to match the prompt template's language —
    # do not translate them.
    existing_content = "\n\n".join(
        f"ARTIKEL {i}:\nTitel: {p.title}\nZusammenfassung: {p.summary}\nKeywords: {p.main_keywords}"
        for i, p in enumerate(published, start=1)
    )

    prompt = _load_prompt("semantic_dedupe.txt").format(
        topic_title=topic.title,
        topic_keywords=topic.target_keywords,
        secondary_keywords=topic.secondary_keywords or "",
        search_intent=topic.search_intent,
        existing_content=existing_content,
    )

    try:
        client = get_gemini_client()
        response = client.models.generate_content(
            model="gemini-2.0-flash",
            contents=prompt,
        )
        result = _parse_dedupe_response(response.text)
        # Lazy %-args: formatting is skipped entirely when INFO is disabled.
        logger.info(
            "Dedupe check for '%s...': %s (overlap: %.0f%%)",
            topic.title[:50],
            result.verdict.value,
            result.overlap_score * 100,
        )
        return result
    except Exception as e:
        # Deliberately broad: this is the pipeline's error boundary for the
        # external LLM call. logger.exception preserves the traceback,
        # which logger.error(f"...") did not.
        logger.exception("Semantic dedupe failed")
        # On error, assume unique to avoid blocking pipeline
        return DupeResult(
            verdict=DupeVerdict.UNIQUE,
            reason=f"Dedupe check failed: {e}",
            overlap_score=0.0,
            similar_articles=[],
        )


def _load_prompt(filename: str) -> str:
    """Load prompt template from prompts directory."""
    prompt_path = Path(__file__).parent.parent / "prompts" / filename
    if not prompt_path.exists():
        raise FileNotFoundError(f"Prompt file not found: {prompt_path}")
    return prompt_path.read_text()


def _parse_dedupe_response(response_text: str) -> DupeResult:
    """Parse Gemini's JSON reply into a DupeResult.

    Tolerates markdown code fences around the JSON. Any malformed reply
    (invalid JSON, unknown verdict, non-numeric overlap_score, or a payload
    that is not a dict) degrades to a UNIQUE fallback instead of raising,
    so one bad model response cannot break the caller.
    """
    try:
        # Extract JSON from response (may be wrapped in markdown)
        text = response_text.strip()
        if "```json" in text:
            text = text.split("```json")[1].split("```")[0]
        elif "```" in text:
            text = text.split("```")[1].split("```")[0]

        data = json.loads(text)

        # DupeVerdict is a str Enum, so value lookup maps the string
        # directly; anything unrecognized falls back to UNIQUE.
        try:
            verdict = DupeVerdict(data.get("verdict", "unique").lower())
        except ValueError:
            verdict = DupeVerdict.UNIQUE

        return DupeResult(
            verdict=verdict,
            reason=data.get("reason", ""),
            overlap_score=float(data.get("overlap_score", 0.0)),
            similar_articles=data.get("similar_articles", []),
        )
    except (ValueError, TypeError, KeyError, AttributeError) as e:
        # ValueError covers json.JSONDecodeError (its subclass) and a
        # non-numeric overlap_score; TypeError/AttributeError cover a
        # top-level JSON value that is not a dict (e.g. a list or string).
        logger.warning(f"Failed to parse dedupe response: {e}")
        # Default to unique on parse error
        return DupeResult(
            verdict=DupeVerdict.UNIQUE,
            reason="Failed to parse response",
            overlap_score=0.0,
            similar_articles=[],
        )
