import asyncio
import json
import logging
import os
import re
import socket
from datetime import datetime, timezone
from typing import List

import aiohttp
import aiomysql
from aiohttp.client_exceptions import ClientConnectorError
from aiomysql import DictCursor
from dotenv import load_dotenv
from fastapi import APIRouter, BackgroundTasks, Body, HTTPException
from pydantic import BaseModel, HttpUrl
from tqdm.asyncio import tqdm

import state
from schemas import AddLinksResponse, StatusResponse, ParsedResultsResponse, FilterPayload, ProxyURLRequest

router = APIRouter()
load_dotenv()


logger = logging.getLogger("uvicorn.error")


@router.post("/parser/add-links", response_model=AddLinksResponse, tags=["Parsing"], summary="Add links")
async def add_links(
    payload: List[HttpUrl] = Body(
        ...,
        example=[
            "https://example.com/page1",
            "https://example.com/page2"
        ]
    )
):
    pool = state.db_pool
    added_count = 0
    async with pool.acquire() as conn:
        async with conn.cursor() as cursor:
            for url in payload:
                await cursor.execute(
                    "INSERT INTO url_queue (url, date_added, status, recived) VALUES (%s, %s, %s, %s)",
                    (str(url), datetime.now(timezone.utc), 0, 0)
                )
                added_count += 1
        # Commit explicitly in case the pool was created with autocommit disabled
        # (the aiomysql default).
        await conn.commit()
    return {"added": added_count, "status": "success"}


@router.get("/parser/status", response_model=StatusResponse, tags=["Парсинг"], summary="Отримати статус")
async def get_status():
    pool = state.db_pool
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cursor:
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 2 AND recived = 0")
            processed = (await cursor.fetchone())["count"]
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 0")
            new_links = (await cursor.fetchone())["count"]
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 1")
            in_queue = (await cursor.fetchone())["count"]

    return {
        "processed_since_last": processed,
        "new_links": new_links,
        "in_queue": in_queue,
        "parser_status": state.parser_status.value
    }


@router.get("/parser/parsed-results", response_model=ParsedResultsResponse, tags=["Парсинг"], summary="Отримати результат")
async def get_parsed_results():
    pool = state.db_pool
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cursor:
            await cursor.execute("SELECT * FROM url_queue WHERE status = 2 AND recived = 0")
            status_2_rows = await cursor.fetchall()

            await cursor.execute("SELECT * FROM url_queue WHERE status = 3 AND recived = 0")
            status_3_rows = await cursor.fetchall()

            # Decode the JSON stored in the `data` column so it comes back
            # as a dict rather than a string.
            for row in (*status_2_rows, *status_3_rows):
                if row.get("data") and isinstance(row["data"], str):
                    try:
                        row["data"] = json.loads(row["data"])
                    except (ValueError, TypeError):
                        row["data"] = None

            # Mark the returned rows as delivered.
            delivered_ids = [row["url_id"] for row in (*status_2_rows, *status_3_rows)]
            if delivered_ids:
                await cursor.execute(
                    f"UPDATE url_queue SET recived = 1 WHERE url_id IN ({','.join(['%s'] * len(delivered_ids))})",
                    delivered_ids
                )
                await conn.commit()

    return {
        "parsed": status_2_rows,
        "error": status_3_rows,
    }
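
# Illustrative response shape (rows come straight from url_queue with `data`
# decoded to a dict where possible; exact columns depend on the table schema):
#
#   {"parsed": [{"url_id": 1, "url": "https://example.com/page1", "status": 2, "data": {}}],
#    "error": []}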

async def create_proxy_from_url(cursor, url: str):
    """Fetch a JSON list of proxies from `url` and insert them into the proxy table."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            proxies_list = await resp.json(content_type=None)

    if not isinstance(proxies_list, list):
        raise ValueError("Expected the URL to return a JSON array of proxy objects")

    inserted_proxies = []

    for proxy_data in proxies_list:
        await cursor.execute(
            """
            INSERT INTO proxy (proxy, port, username, password, country_code)
            VALUES (%s, %s, %s, %s, %s)
            """,
            (
                proxy_data["ip"],
                proxy_data["port"],
                proxy_data.get("username"),
                proxy_data.get("password"),
                proxy_data.get("country_code")
            )
        )
        inserted_proxies.append({
            "id": cursor.lastrowid,
            "proxy": f'{proxy_data["ip"]}:{proxy_data["port"]}'
        })

    return inserted_proxies
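
# The source URL is expected to return JSON shaped like the following
# (field names inferred from the insert above; "username", "password" and
# "country_code" may be omitted or null):
#
#   [
#       {"ip": "203.0.113.10", "port": 8080, "username": "u", "password": "p", "country_code": "UA"},
#       {"ip": "203.0.113.11", "port": 3128}
#   ]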


@router.post("/fromUrl", tags=["Проксі"], summary="Завантажити проксі")
async def create_proxy_from_url_endpoint(data: ProxyURLRequest):
    async with state.db_pool.acquire() as conn:
        # Discard any transaction state left over on the pooled connection.
        await conn.rollback()
        async with conn.cursor(DictCursor) as cursor:
            try:
                inserted = await create_proxy_from_url(cursor, str(data.url))
                await conn.commit()
                return {
                    "data": inserted,
                    "inserted_count": len(inserted),
                    "status": True
                }
            except ValueError as e:
                raise HTTPException(status_code=400, detail=str(e))
            except (ClientConnectorError, socket.gaierror) as e:
                raise HTTPException(status_code=400, detail=f"Cannot connect to host: {e}")
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Internal server error: {e}")




DB_CONFIG_FILTER = {
    "host": os.getenv("DB_HOST_FILTER"),
    "port": int(os.getenv("DB_PORT_FILTER", 3306)),
    "user": os.getenv("DB_USER_FILTER"),
    "password": os.getenv("DB_PASSWORD_FILTER"),
    "db": os.getenv("DB_NAME_FILTER"),
    "autocommit": True,
}

SECRET_KEY = os.getenv("SECRET_KEY")

processing_status = {
    "running": False,
    "total": 0,
    "processed": 0
}

class FilterResponse(BaseModel):
    data: str
    error: str
    status: bool

@router.post("/filter/filters", tags=["Фільтр"], summary="Завантажити фільтра", response_model=FilterResponse)
async def parse_data(payload: FilterPayload):
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    await process_filter_payload(payload, now)
    return {
        "data": "Фільтри оброблені.",
        "error": "",
        "status": True
    }
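
# Illustrative request body for /filter/filters (field names taken from how
# FilterPayload is consumed below; the exact schema lives in schemas.FilterPayload):
#
#   {
#       "Category_name": "Laptops",
#       "Category_id": 42,
#       "FilterGroups": [{"id": 1, "name": "Brand", "sort_order": 1}],
#       "Filters": [{"id": 10, "name": "Lenovo", "group_id": 1}],
#       "Products": [{"product_id": 1001, "filter_id": 10}]
#   }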

async def process_filter_payload(payload: FilterPayload, timestamp: str):
    conn = await aiomysql.connect(**DB_CONFIG_FILTER)
    cursor = await conn.cursor(aiomysql.DictCursor)

    category = payload.Category_name
    category_id = payload.Category_id
    order_filters = []
    all_filter_groups = {}
    unique_filter_ids = set()

    for group in payload.FilterGroups:
        name = group.name
        sort_order = group.sort_order or 0
        group_id = group.id
        filter_group_id = await get_or_create_filter_group(cursor, name, sort_order)
        all_filter_groups[group_id] = name
        order_filters.append({
            "filter_group_id": filter_group_id,
            "category_id": category_id,
            "category_name": category,
            "sort_order": sort_order
        })

    all_filters = {}
    for f in payload.Filters:
        filter_group_id = f.group_id
        filter_id = await get_or_create_filter(cursor, f.name, filter_group_id)
        unique_filter_ids.add(filter_id)
        all_filters[f.id] = {
            "filter_name": f.name,
            "filter_group": all_filter_groups.get(f.group_id, "")
        }

    products = {}
    for p in payload.Products:
        products.setdefault(p.product_id, []).append(all_filters.get(p.filter_id, {}))

    to_insert = []
    to_update = []

    product_ids = list(products.keys())
    if product_ids:
        await cursor.execute(
            f"SELECT product_id FROM xpro_recived_filter WHERE product_id IN ({','.join(['%s'] * len(product_ids))})",
            product_ids
        )
        existing_products = {row['product_id'] for row in await cursor.fetchall()}
    else:
        existing_products = set()

    for product_id, filters in products.items():
        filters_json = json.dumps(filters, ensure_ascii=False)
        if product_id in existing_products:
            to_update.append((filters_json, timestamp, category, product_id))
        else:
            to_insert.append((product_id, filters_json, timestamp, timestamp, category))

    BATCH_SIZE = 10000

    # Insert new products in batches of 10,000
    for i in range(0, len(to_insert), BATCH_SIZE):
        batch = to_insert[i:i + BATCH_SIZE]
        await cursor.executemany(
            "INSERT INTO xpro_recived_filter (product_id, filters, date_added, date_update, category_name) VALUES (%s, %s, %s, %s, %s)",
            batch
        )
        await asyncio.sleep(0.001)  # brief pause so the event loop is not starved

    # Update existing products in batches of 10,000
    for i in range(0, len(to_update), BATCH_SIZE):
        batch = to_update[i:i + BATCH_SIZE]
        await cursor.executemany(
            "UPDATE xpro_recived_filter SET filters = %s, date_update = %s, category_name = %s WHERE product_id = %s",
            batch
        )
        await asyncio.sleep(0.001)

    for f in order_filters:
        await cursor.execute(
            "DELETE FROM xpro_category_filter_group WHERE category_id = %s AND filter_group_id = %s",
            (f["category_id"], f["filter_group_id"])
        )

    if order_filters:
        await cursor.executemany(
            "INSERT INTO xpro_category_filter_group (category_id, category_name, filter_group_id, sort_order) VALUES (%s, %s, %s, %s)",
            [(f["category_id"], f["category_name"], f["filter_group_id"], f["sort_order"]) for f in order_filters]
        )

    await conn.commit()
    await cursor.close()
    await conn.ensure_closed()

def translit(text: str) -> str:
    """Transliterate a Ukrainian/Russian name into a URL-safe slug prefixed with 'f-'."""
    map_ = {
        'а':'a','б':'b','в':'v','г':'h','ґ':'g','д':'d','е':'e','є':'ie','ж':'zh','з':'z','и':'y','і':'i','ї':'i','й':'i','к':'k','л':'l','м':'m','н':'n','о':'o','п':'p','р':'r','с':'s','т':'t','у':'u','ф':'f','х':'kh','ц':'ts','ч':'ch','ш':'sh','щ':'shch','ь':'','ю':'iu','я':'ia','ʼ':'',
        'ё':'yo','ъ':'','ы':'y','э':'e',
        'А':'A','Б':'B','В':'V','Г':'H','Ґ':'G','Д':'D','Е':'E','Є':'Ye','Ж':'Zh','З':'Z','И':'Y','І':'I','Ї':'I','Й':'Y','К':'K','Л':'L','М':'M','Н':'N','О':'O','П':'P','Р':'R','С':'S','Т':'T','У':'U','Ф':'F','Х':'Kh','Ц':'Ts','Ч':'Ch','Ш':'Sh','Щ':'Shch','Ь':'','Ю':'Yu','Я':'Ya','Ё':'Yo','Ъ':'','Ы':'Y','Э':'E',
        ' ':'-','_':'-',',':'','.':'','/':'-',':':'',';':'','?':'','!':'','#':'','(':'',')':'','"':'','\'':'','«':'','»':'','&':'and','%':'','@':'','=':'','+':'','–':'-','—':'-','№':'','’':'',
    }
    text = ''.join(map_.get(c, c) for c in text)
    text = text.lower()
    text = re.sub(r'[^a-z0-9\-]', '', text)
    text = re.sub(r'-+', '-', text)
    return 'f-' + text.strip('-')
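
# Example outputs (computed from the mapping above):
#
#   translit("Колір")          -> "f-kolir"
#   translit("Тип з’єднання")  -> "f-typ-ziednannia"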


async def get_or_create_filter_group(cursor, name: str, sort_order: int = 0) -> int:
    # A missing/zero sort order sorts the group to the end.
    if not sort_order:
        sort_order = 99999

    await cursor.execute("SELECT * FROM oc_filter_group_description WHERE name = %s", (name,))
    result = await cursor.fetchone()

    if not result:
        await cursor.execute("INSERT INTO oc_filter_group (sort_order) VALUES (%s)", (sort_order,))
        filter_group_id = cursor.lastrowid
        await cursor.execute(
            "INSERT INTO oc_filter_group_description (filter_group_id, language_id, name) VALUES (%s, 2, %s)",
            (filter_group_id, name)
        )
        keyword = translit(name)
        await cursor.execute(
            "INSERT INTO xpro_filter_group_seo (filter_group_id, keyword) VALUES (%s, %s)",
            (filter_group_id, keyword)
        )
        return filter_group_id
    else:
        filter_group_id = result["filter_group_id"]
        await cursor.execute(
            "UPDATE oc_filter_group SET sort_order = %s WHERE filter_group_id = %s",
            (sort_order, filter_group_id)
        )
        return filter_group_id


# Module-level cache of (name, filter_group_id) -> filter_id, shared across requests.
filter_cache: dict[tuple[str, int], int] = {}


async def get_or_create_filter(cursor, name: str, filter_group_id: int) -> int:
    key = (name, filter_group_id)
    if key in filter_cache:
        return filter_cache[key]

    await cursor.execute(
        "SELECT filter_id FROM oc_filter_description WHERE name = %s AND filter_group_id = %s",
        (name, filter_group_id)
    )
    result = await cursor.fetchone()

    if result:
        filter_cache[key] = result["filter_id"]
        return result["filter_id"]

    await cursor.execute(
        "INSERT INTO oc_filter (filter_group_id, sort_order) VALUES (%s, %s)",
        (filter_group_id, 0)
    )
    filter_id = cursor.lastrowid

    await cursor.execute(
        "INSERT INTO oc_filter_description (filter_id, language_id, filter_group_id, name) VALUES (%s, 2, %s, %s)",
        (filter_id, filter_group_id, name)
    )

    keyword = translit(name)
    keyword_sub = keyword.replace("f-", "f-not-")

    await cursor.execute(
        "INSERT INTO xpro_filter_seo (filter_id, type, keyword, filter_group_id) VALUES (%s, 'add', %s, %s), (%s, 'sub', %s, %s)",
        (filter_id, keyword, filter_group_id, filter_id, keyword_sub, filter_group_id)
    )

    filter_cache[key] = filter_id
    return filter_id
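
# Example: for a filter named "Lenovo" this creates the SEO keyword "f-lenovo"
# (type 'add') and "f-not-lenovo" (type 'sub'), both derived via translit() above.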


async def add_filter_categories(cursor, filter_ids: list[int], category_ids: list[int]):
    insert_values = [
        (category_id, filter_id)
        for filter_id in filter_ids
        for category_id in category_ids
    ]

    if insert_values:
        # INSERT IGNORE skips duplicate-key rows, replacing the old bare `except: pass`.
        await cursor.executemany(
            "INSERT IGNORE INTO oc_category_filter (category_id, filter_id) VALUES (%s, %s)",
            insert_values
        )


async def add_product_filter(cursor, filter_ids: list[int], product_id: int):
    insert_values = [(product_id, filter_id) for filter_id in filter_ids]

    if insert_values:
        await cursor.executemany(
            "INSERT IGNORE INTO oc_product_filter (product_id, filter_id) VALUES (%s, %s)",
            insert_values
        )


@router.post("/process-filters", tags=["Фільтр"], summary="Запустити обробку фільтрів")
async def start_process_filters(background_tasks: BackgroundTasks):
    if processing_status["running"]:
        return {"status": False, "message": "Обробка вже запущена"}

    background_tasks.add_task(process_filters_loop)
    return {"status": True, "message": "Обробка фільтрів запущена у фоні"}

async def process_filters_loop():
    processing_status["running"] = True
    processing_status["processed"] = 0
    processing_status["total"] = 0

    conn = await aiomysql.connect(**DB_CONFIG_FILTER)
    cursor = await conn.cursor(aiomysql.DictCursor)

    try:
        await cursor.execute("SELECT * FROM xpro_process_filter ORDER BY process_filter_id DESC LIMIT 1")
        last_process = await cursor.fetchone()
        date = "2000-01-01 00:00:00"
        if last_process and "date_process" in last_process:
            date = last_process["date_process"]

        await cursor.execute("""
            SELECT * FROM xpro_recived_filter
        """)
        products_result = await cursor.fetchall()

        processing_status["total"] = len(products_result)

        if not products_result:
            # Nothing to process; the finally block below still rebuilds the
            # index and resets the running flag.
            return

        product_ids = [p["product_id"] for p in products_result]
        
        # Batch delete all product filters at once
        if product_ids:
            sql_delete = f"DELETE FROM oc_product_filter WHERE product_id IN ({','.join(['%s'] * len(product_ids))})"
            await cursor.execute(sql_delete, product_ids)

        # Pre-load all categories for all products in one query
        categories_map = {}
        if product_ids:
            placeholders = ','.join(['%s'] * len(product_ids))
            await cursor.execute(f"SELECT product_id, category_id FROM oc_product_to_category WHERE product_id IN ({placeholders})", product_ids)
            categories_result = await cursor.fetchall()
            for row in categories_result:
                if row["product_id"] not in categories_map:
                    categories_map[row["product_id"]] = []
                categories_map[row["product_id"]].append(row["category_id"])

        # Pre-load existing filter groups and filters. This local cache is keyed
        # by lowercased name and is distinct from the module-level filter_cache.
        filter_group_cache = {}
        local_filter_cache = {}
        
        await cursor.execute("SELECT filter_group_id, LOWER(name) as name FROM oc_filter_group_description WHERE language_id = 2")
        fg_results = await cursor.fetchall()
        for row in fg_results:
            filter_group_cache[row["name"]] = row["filter_group_id"]
            
        await cursor.execute("""
            SELECT f.filter_id, LOWER(fd.name) as name, f.filter_group_id 
            FROM oc_filter f 
            JOIN oc_filter_description fd ON f.filter_id = fd.filter_id 
            WHERE fd.language_id = 2
        """)
        f_results = await cursor.fetchall()
        for row in f_results:
            local_filter_cache[(row["name"], row["filter_group_id"])] = row["filter_id"]

        # Batch operations
        batch_product_filters = []
        batch_filter_categories = []
        batch_updates = []
        date_added = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")

        # Use tqdm for progress tracking
        pbar = tqdm(total=len(products_result), desc="Processing filters", unit="products")
        
        for i, product in enumerate(products_result, start=1):
            product_id = product["product_id"]
            filters = json.loads(product["filters"])
            filter_ids = []

            for filter_obj in filters:
                filter_group_name = filter_obj.get("filter_group")
                filter_name = filter_obj.get("filter_name")

                if not filter_group_name or not filter_name:
                    continue

                fg_key = filter_group_name.lower()
                if fg_key not in filter_group_cache:
                    filter_group_id = await get_or_create_filter_group(cursor, filter_group_name)
                    filter_group_cache[fg_key] = filter_group_id
                else:
                    filter_group_id = filter_group_cache[fg_key]

                f_key = (filter_name.lower(), filter_group_id)
                if f_key not in local_filter_cache:
                    filter_id = await get_or_create_filter(cursor, filter_name, filter_group_id)
                    local_filter_cache[f_key] = filter_id
                else:
                    filter_id = local_filter_cache[f_key]

                filter_ids.append(filter_id)

            # Prepare batch data for product filters
            for filter_id in filter_ids:
                batch_product_filters.append((product_id, filter_id))

            # Prepare batch data for filter categories
            categories = categories_map.get(product_id, [])
            for filter_id in filter_ids:
                for category_id in categories:
                    batch_filter_categories.append((filter_id, category_id))

            batch_updates.append((date_added, product_id))
            processing_status["processed"] = i
            
            # Update progress bar
            pbar.update(1)

        pbar.close()

        # Execute all batch operations
        if batch_product_filters:
            await cursor.executemany("INSERT IGNORE INTO oc_product_filter (product_id, filter_id) VALUES (%s, %s)", batch_product_filters)
            
        if batch_filter_categories:
            await cursor.executemany("INSERT IGNORE INTO oc_filter_to_category (filter_id, category_id) VALUES (%s, %s)", batch_filter_categories)
            
        if batch_updates:
            await cursor.executemany("UPDATE xpro_recived_filter SET date_process = %s WHERE product_id = %s", batch_updates)

        await conn.commit()
        
    finally:
        await cursor.execute("""
        TRUNCATE product_filter_index;
        INSERT INTO product_filter_index (
                product_id, category_id, filter_group_id, filter_id, group_name, filter_name
            )
            SELECT DISTINCT
                p2c.product_id,
                p2c.category_id,
                f.filter_group_id,
                f.filter_id,
                fgd.name AS group_name,
                fd.name AS filter_name
            FROM oc_product_to_category p2c
            JOIN oc_product_filter pf ON pf.product_id = p2c.product_id
            JOIN oc_filter f ON f.filter_id = pf.filter_id
            JOIN oc_filter_group_description fgd ON f.filter_group_id = fgd.filter_group_id AND fgd.language_id = 2
            JOIN oc_filter_description fd ON f.filter_id = fd.filter_id AND fd.language_id = 2;
        """)
        await conn.commit()
        processing_status["running"] = False
        await cursor.close()
        conn.close()


@router.get("/processing-status", tags=["Фільтр"], summary="Статус обробки фільтрів")
async def get_processing_status():
    return {
        "running": processing_status["running"],
        "processed": processing_status["processed"],
        "total": processing_status["total"],
        "percent": round((processing_status["processed"] / processing_status["total"]) * 100, 2)
        if processing_status["total"] > 0 else 0
    }
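
# Illustrative polling loop (assumption: the app is served locally on port 8000):
#
#   import time
#   import httpx
#
#   httpx.post("http://localhost:8000/process-filters")
#   while True:
#       status = httpx.get("http://localhost:8000/processing-status").json()
#       print(f"{status['processed']}/{status['total']} ({status['percent']}%)")
#       if not status["running"]:
#           break
#       time.sleep(2)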
