from typing import Optional

from fastapi import APIRouter, Depends, File as FastAPIFile, Path, Query, UploadFile
from sqlalchemy.orm import Session

from src.apps.base.schemas.response_model import ResponseModel
from src.apps.files.schemas.file import FileResponseSchema
from src.apps.web_crawler.schemas.web_crawler import (
    WebCrawlerCreate,
    WebCrawlerFilterSchema,
    WebCrawlerResponse,
    WebCrawlerUpdate,
)
from src.apps.web_crawler.services import web_crawler as web_crawler_service
from src.apps.web_crawler.services.web_crawler import create_file
from src.core.dependencies import get_db
from src.utils.guard import get_current_user  # kept for the commented-out auth dependency below

router = APIRouter(prefix="/web-crawlers", tags=["web_crawlers"])
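# The routes below cover CRUD for web crawlers plus file upload, file preview,
# CSV generation from crawled XML, and column-index lookup on the generated CSV.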


@router.get("", response_model=ResponseModel, summary="List all web crawlers with filters")
def list_web_crawlers(
    page: int = Query(1, ge=1, description="Page number for pagination"),
    per_page: int = Query(10, ge=1, le=100, description="Number of records per page"),
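    # Filter fields are resolved from individual query parameters via FastAPI's
    # "classes as dependencies" pattern (WebCrawlerFilterSchema is instantiated per request).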
    filters: WebCrawlerFilterSchema = Depends(),
    db: Session = Depends(get_db),
):
    paginated = web_crawler_service.get_web_crawlers(db, page=page, per_page=per_page, filters=filters)
    return ResponseModel(data=paginated, status_code=200, success=True, message="Web crawlers fetched successfully")


@router.get("/{crawler_id}", response_model=ResponseModel, summary="Get a web crawler by ID")
async def get_web_crawler(
    crawler_id: int,
    db: Session = Depends(get_db),
):
    crawler = web_crawler_service.get_web_crawler(db, crawler_id)
    return ResponseModel(data=crawler, status_code=200, success=True, message="Web crawler fetched successfully")


@router.post("", response_model=ResponseModel, status_code=201, summary="Create a new web crawler")
async def create_web_crawler(
    payload: WebCrawlerCreate,
    db: Session = Depends(get_db),
):
    created = web_crawler_service.create_web_crawler(db, payload)
    return ResponseModel(
        data=WebCrawlerResponse.model_validate(created),
        status_code=201,
        success=True,
        message="Web crawler created successfully",
    )


@router.put("/{crawler_id}", response_model=ResponseModel, summary="Update a web crawler by ID")
async def update_web_crawler(
    crawler_id: int,
    payload: WebCrawlerUpdate,
    db: Session = Depends(get_db),
):
    updated = web_crawler_service.update_web_crawler(db, crawler_id, payload)
    return ResponseModel(
        data=WebCrawlerResponse.model_validate(updated),
        status_code=200,
        success=True,
        message="Web crawler updated successfully",
    )


@router.post("/{crawler_id}/upload", response_model=ResponseModel, summary="Upload a file to a web crawler")
async def upload_file_to_web_crawler(
    crawler_id: int = Path(..., description="Web Crawler ID"),
    is_history: Optional[bool] = None,
    file: UploadFile = FastAPIFile(..., description="File to upload"),
    db: Session = Depends(get_db),
):
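    """Upload a file and attach it to the given web crawler.

    Example request (hypothetical host and values):
        curl -X POST "http://localhost:8000/web-crawlers/42/upload?is_history=true" \
             -F "file=@results.csv"
    """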
    created_file = await create_file(db, upload_file=file, is_history=is_history, crawler_id=crawler_id)
    return ResponseModel(
        data=FileResponseSchema.model_validate(created_file),
        status_code=201,
        success=True,
        message="File uploaded and created successfully"
    )
    
@router.get("/{crawler_id}/preview-file", response_model=ResponseModel, summary="Preview a web crawler file")
async def preview_file(
    crawler_id: int = Path(..., description="Web Crawler ID"),
    rows: int = Query(..., description="Number of rows to preview"),
    db: Session = Depends(get_db),
):
    preview_data = await web_crawler_service.preview_file(db, crawler_id, rows)
    return ResponseModel(
        data=preview_data,
        status_code=200,
        success=True,
        message="File preview fetched successfully"
    )



@router.delete("/{crawler_id}", response_model=ResponseModel, summary="Soft delete a web crawler")
async def delete_web_crawler(
    crawler_id: int,
    db: Session = Depends(get_db),
):
    web_crawler_service.soft_delete_web_crawler(db, crawler_id)
    return ResponseModel(data=None, status_code=200, success=True, message="Web crawler deleted successfully")


# ---- CSV generation and column utilities ----


@router.post("/{match_crawler_id}/run", summary="Run a match config job")
def run_config(
    match_crawler_id: int,
    db: Session = Depends(get_db),
    # current_user: dict = Depends(get_current_user),
):
    # Run and generate CSV file
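    # NOTE: created_by_id=0 is a placeholder; if the get_current_user dependency
    # above is re-enabled, the authenticated user's id should be passed here instead.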
    csv_file = web_crawler_service.generate_csv_from_xml(db=db, match_crawler_id=match_crawler_id, created_by_id=0)

    return ResponseModel(
        data={"file_url": csv_file.full_url},
        message="CSV generated and file attached to crawler successfully.",
        success=True,
        status_code=200,
    )


@router.get("/{webcrawler_id}/column-indexes", summary="Get column names and indexes from CSV file")
def get_column_indexes(webcrawler_id: int = Path(..., description="Web Crawler ID"), db: Session = Depends(get_db)):
    columns = web_crawler_service.get_column_indexes_service(db, webcrawler_id)

    return ResponseModel(
        data=columns,
        message="Column indexes retrieved successfully",
        success=True,
        status_code=200,
    )
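

# This router is expected to be mounted on the application elsewhere, e.g.:
#   app.include_router(router)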
