"""Backend abstraction for schema generation.

Each backend implements the SchemaGenerator protocol.
The active backend is selected via MANTARA_BACKEND env var.

To add a new backend (e.g., local transformers):
    1. Create backends/transformers_backend.py
    2. Implement the SchemaGenerator protocol (a generate() method)
    3. Register it in get_backend() below
"""

from __future__ import annotations
from typing import Protocol, runtime_checkable

from models import MantaraSchema


@runtime_checkable
class SchemaGenerator(Protocol):
    """Structural interface every LLM backend must satisfy.

    Any object exposing a matching ``generate`` method conforms; the
    ``@runtime_checkable`` decorator additionally permits isinstance()
    checks (these verify method presence only, not signatures).
    """

    def generate(
        self,
        system_prompt: str,
        user_input: str,
        model: str | None = None,
    ) -> MantaraSchema:
        """Produce a validated MantaraSchema from a prompt pair.

        Args:
            system_prompt: Full system prompt text.
            user_input: The user's natural-language description.
            model: Optional override of the backend's default model.

        Returns:
            A validated MantaraSchema object.

        Raises:
            ValueError: If the model refuses or returns empty output.
            RuntimeError: If all retries fail.
        """
        ...


def get_backend(backend_name: str | None = None) -> SchemaGenerator:
    """Factory function — returns the active backend based on config.

    Args:
        backend_name: Backend to use. If None, reads from config.BACKEND.

    Supported backends:
        - "openai": OpenAI API with Structured Outputs (default)
        - "ollama": local Ollama server
        - "llamacpp": local llama.cpp server
        - "bedrock": AWS Bedrock
        - "anthropic": Anthropic API
        - "claude-cli": bridge to the local Claude Code CLI — uses the
          user's OAuth session, no API key required. See
          backends/claude_cli_backend.py.

    Raises:
        ValueError: If backend_name is not one of the supported names.
    """
    if backend_name is None:
        from config import BACKEND
        backend_name = BACKEND

    # Registry: backend name -> (module path, class name). Backend modules
    # are imported lazily, only when selected, so each backend's own
    # dependencies are not required unless that backend is actually used.
    registry: dict[str, tuple[str, str]] = {
        "openai": ("backends.openai_backend", "OpenAIBackend"),
        "ollama": ("backends.ollama_backend", "OllamaBackend"),
        "llamacpp": ("backends.llamacpp_backend", "LlamaCppBackend"),
        "bedrock": ("backends.bedrock_backend", "BedrockBackend"),
        "anthropic": ("backends.anthropic_backend", "AnthropicBackend"),
        "claude-cli": ("backends.claude_cli_backend", "ClaudeCliBackend"),
    }

    try:
        module_path, class_name = registry[backend_name]
    except KeyError:
        # The registry is the single source of truth for the error message,
        # so adding a backend above automatically updates it here too.
        supported = ", ".join(f"'{name}'" for name in registry)
        raise ValueError(
            f"Unknown backend: '{backend_name}'. "
            f"Supported: {supported}. "
            f"Set MANTARA_BACKEND env var to a valid backend name."
        ) from None

    import importlib

    backend_cls = getattr(importlib.import_module(module_path), class_name)
    return backend_cls()
