from datetime import datetime
from typing import Dict, List, Optional, Union

from fastapi import status
from sqlalchemy.orm import Session
from sqlalchemy.sql.expression import nullslast

from src.apps.web_crawler.models.web_crawler import WebCrawler
from src.apps.web_crawler.schemas.work_group import (
    WorkGroupCreateSchema,
    WorkGroupFilterSchema,
    WorkGroupOutputSchema,
    WorkGroupUpdateSchema,
)
from src.apps.wine.validation.models.validation import WorkGroup, WorkGroupWebCrawler
from src.core.config import settings
from src.core.exceptions import APIException
from src.utils.constants import API_PREFIXES
from src.utils.pagination import QueryPaginator


async def get_all_work_groups_service(
    db: Session,
    payload: WorkGroupFilterSchema,
    page: int = 1,
    per_page: int = 10,
    sort_by: Optional[List[str]] = None,
    paginated: bool = True,
) -> Union[Dict, List[WorkGroupOutputSchema]]:
    """
    Get all work groups with filtering, sorting, and pagination.
    Returns a plain list of output schemas instead when `paginated` is False.
    """
    try:
        query = db.query(WorkGroup).filter(WorkGroup.deleted_at.is_(None))
        
        # Apply search filter if provided
        query = apply_work_group_search_filter(query, payload)
        
        # Apply sorting if provided
        sort_query = build_work_group_sort_query(sort_by)
        
        if sort_query:
            query = query.order_by(*sort_query)
        else:
            query = query.order_by(nullslast(WorkGroup.name.asc()))

        if not paginated:
            records = query.all()
            return [WorkGroupOutputSchema.model_validate(it) for it in records]

        offset = (page - 1) * per_page
        paginator = QueryPaginator(
            query=query,
            schema=WorkGroupOutputSchema,
            url="".join([str(settings.api_base_url()), API_PREFIXES.WORK_GROUP]),
            offset=offset,
            limit=per_page,
            use_orm=True,
        )

        return paginator.paginate()
    except APIException:
        # Let the 422 from an invalid sort column pass through instead of
        # masking it as a 500 below.
        raise
    except Exception as e:
        raise APIException(
            module="get_all_work_groups_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to get all work groups",
        )
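
# Example usage (a sketch, not part of this module): a FastAPI route wiring the
# service to a hypothetical `router` and `get_db` dependency.
#
#   @router.get("/work-groups")
#   async def list_work_groups(
#       page: int = 1,
#       per_page: int = 10,
#       db: Session = Depends(get_db),
#   ):
#       payload = WorkGroupFilterSchema(search=None)
#       return await get_all_work_groups_service(db, payload, page=page, per_page=per_page)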
        

def build_work_group_sort_query(sort_by: Optional[List[str]] = None) -> List:
    """
    Build an ORDER BY list from sort field names; a leading "-" on a field
    requests descending order. Defaults to sorting by name.
    Raises APIException (422) for a field outside the allowed set.
    """
    sortable_columns = {"name", "created_at", "updated_at", "id"}
    sort_query = []
    for s in sort_by or ["name"]:
        col = s.lstrip("-")
        if col not in sortable_columns:
            raise APIException(
                module="build_work_group_sort_query",
                error=f"Cannot sort with unidentified column '{s}'",
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                message="Error retrieving work groups.",
            )
        sort_order = getattr(WorkGroup, col)
        if s.startswith("-"):
            sort_order = sort_order.desc()
        sort_query.append(nullslast(sort_order))
    return sort_query
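
# Example (a sketch): a leading "-" requests descending order; NULLs sort last.
#
#   build_work_group_sort_query(["-created_at", "name"])
#   # -> [nullslast(WorkGroup.created_at.desc()), nullslast(WorkGroup.name)]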


def apply_work_group_search_filter(query, payload):
    """
    Apply a case-insensitive substring search on WorkGroup.name when the
    payload carries a non-empty `search` term.
    """
    if getattr(payload, "search", None):
        query = query.filter(WorkGroup.name.ilike(f"%{payload.search}%"))
    return query
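
# Example (a sketch): a payload with search="cellar" matches names such as
# "Cellar Tracker" or "wine cellars" via a case-insensitive ILIKE.
#
#   query = apply_work_group_search_filter(query, WorkGroupFilterSchema(search="cellar"))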

async def get_work_group_by_id_service(
    db: Session,
    work_group_id: int,
) -> WorkGroup:
    """
    Get a work group ORM record by its ID.
    Raises APIException (404) if not found.
    """
    try:
        work_group = (
            db.query(WorkGroup)
            .filter(WorkGroup.id == work_group_id, WorkGroup.deleted_at.is_(None))
            .first()
        )
        if not work_group:
            raise APIException(
                module="get_work_group_by_id_service",
                error={"work_group_id": work_group_id},
                status_code=status.HTTP_404_NOT_FOUND,
                message="Work group not found",
            )
        return work_group
    except APIException:
        # Propagate the 404 as-is instead of masking it as a 500 below.
        raise
    except Exception as e:
        raise APIException(
            module="get_work_group_by_id_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to get work group by ID",
        )
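
# Example usage (a sketch): the service returns the ORM record, so callers can
# mutate it directly or validate it into the output schema.
#
#   wg = await get_work_group_by_id_service(db, work_group_id=42)
#   out = WorkGroupOutputSchema.model_validate(wg)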

async def create_work_group_service(
    db: Session,
    payload: WorkGroupCreateSchema
) -> WorkGroupOutputSchema:
    """
    Create a new work group.
    """
    # Check for duplicate name
    await check_duplicate_work_group_name(db, payload.name)

    # Fetch web crawlers by retailer codes   
    web_crawler_map = await fetch_web_crawlers_for_work_group(db, payload.retailer_codes)
        
    try:
        new_group = WorkGroup(
            name=payload.name,
        )
        db.add(new_group)
        db.commit()

        # Associate the resolved web crawlers with the new group.
        await associate_web_crawlers_with_work_group(db, new_group.id, list(web_crawler_map.keys()))

        db.refresh(new_group)
        return WorkGroupOutputSchema.model_validate(new_group)
    except APIException:
        raise
    except Exception as e:
        raise APIException(
            module="create_work_group_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to create work group",
        )
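
# Example usage (a sketch; the retailer codes are illustrative values, not
# known codes):
#
#   payload = WorkGroupCreateSchema(name="EU retailers", retailer_codes=["wine_com", "vivino"])
#   group = await create_work_group_service(db, payload)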
        
async def update_work_group_service(
    db: Session,
    work_group_id: int, 
    payload: WorkGroupUpdateSchema
) -> WorkGroupOutputSchema:
    """
    Update an existing work group.
    """
    try:
        work_group = await get_work_group_by_id_service(db, work_group_id)
        
        # Check for duplicate name if name is being updated
        if payload.name and payload.name != work_group.name:
            await check_duplicate_work_group_name(db, payload.name)
        
        # Update fields
        if payload.name:
            work_group.name = payload.name
        
        db.commit()
        
        web_crawler_map = await fetch_web_crawlers_for_work_group(db, payload.retailer_codes)
        
        # Clear existing associations
        await clear_work_group_web_crawler_associations(db, work_group_id)
        
        # Re-associate web crawlers
        await associate_web_crawlers_with_work_group(db, work_group_id, list(web_crawler_map.keys()))
        
        db.refresh(work_group)
        
        return WorkGroupOutputSchema.model_validate(work_group)

    except APIException:
        # Let the 404 (missing group) and 409 (duplicate name) pass through
        # unchanged instead of masking them as a 500 below.
        raise
    except Exception as e:
        raise APIException(
            module="update_work_group_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to update work group",
        )
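
# Example usage (a sketch): replaces the group's crawler associations with the
# ones matching the new retailer codes.
#
#   payload = WorkGroupUpdateSchema(name="EU retailers (renamed)", retailer_codes=["wine_com"])
#   updated = await update_work_group_service(db, work_group_id=42, payload=payload)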
        

async def delete_work_group_service(
    db: Session,
    work_group_id: int,
) -> Dict:
    """
    Soft delete a work group by setting its `deleted_at` timestamp.
    Returns an empty dict on success.
    """
    try:
        work_group = await get_work_group_by_id_service(db, work_group_id)
        work_group.deleted_at = datetime.utcnow()
        db.commit()
        return {}
    except APIException:
        # Propagate the 404 from the lookup unchanged.
        raise
    except Exception as e:
        raise APIException(
            module="delete_work_group_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to delete work group",
        )


async def check_duplicate_work_group_name(db: Session, name: str):
    """
    Check whether a non-deleted work group with the given name already exists.
    Raises APIException (409) on a duplicate.
    """
    try:
        existing_group = (
            db.query(WorkGroup)
            .filter(WorkGroup.name == name, WorkGroup.deleted_at.is_(None))
            .first()
        )
        if existing_group:
            raise APIException(
                module="check_duplicate_work_group_name",
                error={"name": name},
                status_code=status.HTTP_409_CONFLICT,
                message="Work group with this name already exists",
            )
    except APIException:
        # Propagate the 409 as-is instead of converting it to a 500 below.
        raise
    except Exception as e:
        raise APIException(
            module="check_duplicate_work_group_name",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to check for a duplicate work group name",
        )
        
async def fetch_web_crawlers_for_work_group(db: Session, retailer_codes: List[str]) -> Dict[int, WebCrawler]:
    """
    Fetch non-deleted web crawlers by retailer code and return them keyed by ID.
    Raises APIException on error.
    """
    try:
        web_crawlers = db.query(WebCrawler).filter(
            WebCrawler.code.in_(retailer_codes or []),
            WebCrawler.deleted_at.is_(None),
        ).all()
        return {wc.id: wc for wc in web_crawlers}
    except Exception as e:
        raise APIException(
            module="fetch_web_crawlers_for_work_group",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to fetch web crawlers for the work group",
        )
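
# Example (a sketch; the codes are illustrative): unknown codes are silently
# dropped, so the returned map may be smaller than the input list.
#
#   crawler_map = await fetch_web_crawlers_for_work_group(db, ["wine_com", "vivino"])
#   # -> {wc.id: wc} for each matching, non-deleted WebCrawler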
        
async def associate_web_crawlers_with_work_group(db: Session, work_group_id: int, web_crawler_ids: List[int]):
    """
    Create associations between a work group and web crawlers.
    Raises APIException on error.
    """
    try:
        for wc_id in web_crawler_ids:
            association = WorkGroupWebCrawler(
                work_group_id=work_group_id,
                web_crawler_id=wc_id,
            )
            db.add(association)
        db.commit()
    except Exception as e:
        raise APIException(
            module="create_work_group_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to associate web crawlers with the work group"
        )
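
# Example (a sketch): inserts one WorkGroupWebCrawler row per crawler ID in a
# single commit.
#
#   await associate_web_crawlers_with_work_group(db, work_group_id=42, web_crawler_ids=[3, 7])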
        
async def clear_work_group_web_crawler_associations(db: Session, work_group_id: int):
    """
    Delete all WorkGroupWebCrawler associations for a given work group.
    Raises APIException on error.
    """
    try:
        db.query(WorkGroupWebCrawler).filter(WorkGroupWebCrawler.work_group_id == work_group_id).delete()
        db.commit()
    except Exception as e:
        raise APIException(
            module="update_work_group_service",
            error={"exception": str(e)},
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            message="Failed to clear work group web crawler associations"
        )