import asyncio
import json
import os
import re
import time

import aiomysql
from dotenv import load_dotenv
from fastapi import APIRouter, Body, Query, BackgroundTasks, Depends
from fastapi.responses import JSONResponse
from pydantic import HttpUrl, BaseModel
from typing import List
from datetime import datetime, timezone
import state
from schemas import AddLinksResponse, StatusResponse, ParsedResultsResponse, FilterPayload
import logging
from tqdm import tqdm

router = APIRouter()
load_dotenv()


logger = logging.getLogger("uvicorn.error")


@router.post("/parser/add-links", response_model=AddLinksResponse, tags=["Парсинг"], summary="Додати лінк")
async def add_links(
    payload: List[HttpUrl] = Body(
        ...,
        example=[
            "https://example.com/page1",
            "https://example.com/page2"
        ]
    )
):
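    """Add one or more URLs to the parsing queue (inserted with status = 0, i.e. new).

    Example call (illustrative only; host and port depend on the deployment):
        curl -X POST http://localhost:8000/parser/add-links -H "Content-Type: application/json" -d '["https://example.com/page1", "https://example.com/page2"]'
    """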
    pool = state.db_pool
    added_count = 0
    async with pool.acquire() as conn:
        async with conn.cursor() as cursor:
            for url in payload:
                await cursor.execute(
                    "INSERT INTO url_queue (url, date_added, status, recived) VALUES (%s, %s, %s, %s)",
                    (str(url), datetime.now(timezone.utc), 0, 0)
                )
                added_count += 1
        # Commit explicitly: the other endpoints that use this pool also commit by hand.
        await conn.commit()
    return {"added": added_count, "status": "success"}


@router.get("/parser/status", response_model=StatusResponse, tags=["Парсинг"], summary="Отримати статус")
async def get_status():
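    """Return queue counters and the current parser state.

    Status codes as used by the queries below: 0 = new, 1 = queued for
    processing, 2 = parsed successfully, 3 = finished with an error; the
    `recived` flag marks rows already handed out via /parser/parsed-results.
    """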
    pool = state.db_pool
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cursor:
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 2 AND recived = 0")
            processed = (await cursor.fetchone())["count"]
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 0")
            new_links = (await cursor.fetchone())["count"]
            await cursor.execute("SELECT COUNT(*) as count FROM url_queue WHERE status = 1")
            in_queue = (await cursor.fetchone())["count"]

    return {
        "processed_since_last": processed,
        "new_links": new_links,
        "in_queue": in_queue,
        "parser_status": state.parser_status.value
    }


@router.get("/parser/parsed-results", response_model=ParsedResultsResponse, tags=["Парсинг"], summary="Отримати результат")
async def get_parsed_results():
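    """Return rows that finished parsing (status = 2) or failed (status = 3)
    and have not been delivered yet, then mark them as delivered (recived = 1).
    The JSON stored in the `data` column is decoded into a dict where possible.
    """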
    pool = state.db_pool
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cursor:
            await cursor.execute("SELECT * FROM url_queue WHERE status = 2 AND recived = 0")
            status_2_rows = await cursor.fetchall()

            await cursor.execute("SELECT * FROM url_queue WHERE status = 3 AND recived = 0")
            status_3_rows = await cursor.fetchall()

            # Decode the JSON in the `data` column so it is returned as a dict rather than a str.
            for row in (*status_2_rows, *status_3_rows):
                if row.get("data") and isinstance(row["data"], str):
                    try:
                        row["data"] = json.loads(row["data"])
                    except Exception:
                        row["data"] = None

            # Mark these rows as delivered so they are not returned again.
            all_ids = [row["url_id"] for row in (*status_2_rows, *status_3_rows)]
            if all_ids:
                await cursor.execute(
                    f"UPDATE url_queue SET recived = 1 WHERE url_id IN ({','.join(['%s'] * len(all_ids))})",
                    all_ids
                )
                await conn.commit()

    return {
        "parsed": status_2_rows,
        "error": status_3_rows,
    }




DB_CONFIG_FILTER = {
    "host": os.getenv("DB_HOST_FILTER"),
    "port": int(os.getenv("DB_PORT_FILTER", 3306)),
    "user": os.getenv("DB_USER_FILTER"),
    "password": os.getenv("DB_PASSWORD_FILTER"),
    "db": os.getenv("DB_NAME_FILTER"),
    "autocommit": True,
}
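# Expected environment variables for the filter database (example values only;
# the real values come from the .env file loaded by load_dotenv()):
#   DB_HOST_FILTER=127.0.0.1
#   DB_PORT_FILTER=3306
#   DB_USER_FILTER=opencart
#   DB_PASSWORD_FILTER=secret
#   DB_NAME_FILTER=opencart_db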

SECRET_KEY = os.getenv("SECRET_KEY")

processing_status = {
    "running": False,
    "total": 0,
    "processed": 0
}

class FilterResponse(BaseModel):
    data: str
    error: str
    status: bool

@router.post("/filter/filters", tags=["Фільтр"], summary="Завантажити фільтра", response_model=FilterResponse)
async def parse_data(payload: FilterPayload):
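    """Accept a filter payload for a single category and synchronously load it
    into the filter tables (see process_filter_payload below).

    Illustrative payload shape, inferred from how FilterPayload is consumed
    below (field names follow schemas.FilterPayload; the values are made up):
        {
            "Category_id": 10,
            "Category_name": "Ноутбуки",
            "FilterGroups": [{"id": 1, "name": "Бренд", "sort_order": 1}],
            "Filters": [{"id": 5, "name": "Lenovo", "group_id": 1}],
            "Products": [{"product_id": 1001, "filter_id": 5}]
        }
    """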
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    await process_filter_payload(payload, now)
    return {
        "data": "Фільтри оброблені.",
        "error": "",
        "status": True
    }

async def process_filter_payload(payload: FilterPayload, timestamp: str):
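    """Upsert the received filters into the oc_* / xpro_* filter tables.

    Steps, as implemented below:
      1. Ensure every filter group and filter exists (creating SEO keywords on the way).
      2. Group the product/filter pairs by product and insert or update them in
         xpro_recived_filter in batches.
      3. Rewrite the category -> filter group ordering in xpro_category_filter_group.
    """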
    conn = await aiomysql.connect(**DB_CONFIG_FILTER)
    cursor = await conn.cursor(aiomysql.DictCursor)

    category = payload.Category_name
    category_id = payload.Category_id
    order_filters = []
    all_filter_groups = {}
    unique_filter_ids = set()

    for group in payload.FilterGroups:
        name = group.name
        sort_order = group.sort_order or 0
        group_id = group.id
        filter_group_id = await get_or_create_filter_group(cursor, name, sort_order)
        all_filter_groups[group_id] = name
        order_filters.append({
            "filter_group_id": filter_group_id,
            "category_id": category_id,
            "category_name": category,
            "sort_order": sort_order
        })

    all_filters = {}
    for f in payload.Filters:
        filter_group_id = f.group_id
        filter_id = await get_or_create_filter(cursor, f.name, filter_group_id)
        unique_filter_ids.add(filter_id)
        all_filters[f.id] = {
            "filter_name": f.name,
            "filter_group": all_filter_groups.get(f.group_id, "")
        }

    products = {}
    for p in payload.Products:
        products.setdefault(p.product_id, []).append(all_filters.get(p.filter_id, {}))

    to_insert = []
    to_update = []

    product_ids = list(products.keys())
    if product_ids:
        await cursor.execute(
            f"SELECT product_id FROM xpro_recived_filter WHERE product_id IN ({','.join(['%s'] * len(product_ids))})",
            product_ids
        )
        existing_products = {row['product_id'] for row in await cursor.fetchall()}
    else:
        existing_products = set()

    for product_id, filters in products.items():
        filters_json = json.dumps(filters, ensure_ascii=False)
        if product_id in existing_products:
            to_update.append((filters_json, timestamp, category, product_id))
        else:
            to_insert.append((product_id, filters_json, timestamp, timestamp, category))

    BATCH_SIZE = 10000

    # Insert new products in batches of 10,000.
    for i in range(0, len(to_insert), BATCH_SIZE):
        batch = to_insert[i:i + BATCH_SIZE]
        await cursor.executemany(
            "INSERT INTO xpro_recived_filter (product_id, filters, date_added, date_update, category_name) VALUES (%s, %s, %s, %s, %s)",
            batch
        )
        await asyncio.sleep(0.001)  # brief pause so other tasks can run while loading large batches

    # Update existing products in batches of 10,000.
    for i in range(0, len(to_update), BATCH_SIZE):
        batch = to_update[i:i + BATCH_SIZE]
        await cursor.executemany(
            "UPDATE xpro_recived_filter SET filters = %s, date_update = %s, category_name = %s WHERE product_id = %s",
            batch
        )
        await asyncio.sleep(0.001)

    for f in order_filters:
        await cursor.execute(
            "DELETE FROM xpro_category_filter_group WHERE category_id = %s AND filter_group_id = %s",
            (f["category_id"], f["filter_group_id"])
        )

    await cursor.executemany(
        "INSERT INTO xpro_category_filter_group (category_id, category_name, filter_group_id, sort_order) VALUES (%s, %s, %s, %s)",
        [(f["category_id"], f["category_name"], f["filter_group_id"], f["sort_order"]) for f in order_filters]
    )

    await conn.commit()
    await cursor.close()
    await conn.ensure_closed()

def translit(text: str) -> str:
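    """Convert a Ukrainian/Russian string into a URL-safe slug prefixed with "f-".

    Example: translit("Колір екрану") == "f-kolir-ekranu"
    """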
    map_ = {
        'а':'a','б':'b','в':'v','г':'h','ґ':'g','д':'d','е':'e','є':'ie','ж':'zh','з':'z','и':'y','і':'i','ї':'i','й':'i','к':'k','л':'l','м':'m','н':'n','о':'o','п':'p','р':'r','с':'s','т':'t','у':'u','ф':'f','х':'kh','ц':'ts','ч':'ch','ш':'sh','щ':'shch','ь':'','ю':'iu','я':'ia','ʼ':'',
        'ё':'yo','ъ':'','ы':'y','э':'e',
        'А':'A','Б':'B','В':'V','Г':'H','Ґ':'G','Д':'D','Е':'E','Є':'Ye','Ж':'Zh','З':'Z','И':'Y','І':'I','Ї':'I','Й':'Y','К':'K','Л':'L','М':'M','Н':'N','О':'O','П':'P','Р':'R','С':'S','Т':'T','У':'U','Ф':'F','Х':'Kh','Ц':'Ts','Ч':'Ch','Ш':'Sh','Щ':'Shch','Ь':'','Ю':'Yu','Я':'Ya','Ё':'Yo','Ъ':'','Ы':'Y','Э':'E',
        ' ':'-','_':'-',',':'','.':'','/':'-',':':'',';':'','?':'','!':'','#':'','(':'',')':'','"':'','\'':'','«':'','»':'','&':'and','%':'','@':'','=':'','+':'','–':'-','—':'-','№':'','’':'',
    }
    text = ''.join(map_.get(c, c) for c in text)
    text = text.lower()
    text = re.sub(r'[^a-z0-9\-]', '', text)
    text = re.sub(r'-+', '-', text)
    return 'f-' + text.strip('-')


async def get_or_create_filter_group(cursor, name: str, sort_order: int = 0) -> int:
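    """Return the id of the filter group with the given name, creating it
    (together with its description and SEO keyword) if it does not exist.
    A missing or zero sort_order is pushed to the end with the value 99999.
    """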
    if not sort_order:
        sort_order = 99999

    await cursor.execute("SELECT * FROM oc_filter_group_description WHERE name = %s", (name,))
    result = await cursor.fetchone()

    if not result:
        await cursor.execute("INSERT INTO oc_filter_group (sort_order) VALUES (%s)", (sort_order,))
        filter_group_id = cursor.lastrowid
        await cursor.execute(
            "INSERT INTO oc_filter_group_description (filter_group_id, language_id, name) VALUES (%s, 2, %s)",
            (filter_group_id, name)
        )
        keyword = translit(name)
        await cursor.execute(
            "INSERT INTO xpro_filter_group_seo (filter_group_id, keyword) VALUES (%s, %s)",
            (filter_group_id, keyword)
        )
        return filter_group_id
    else:
        filter_group_id = result["filter_group_id"]
        await cursor.execute(
            "UPDATE oc_filter_group SET sort_order = %s WHERE filter_group_id = %s",
            (sort_order, filter_group_id)
        )
        return filter_group_id


filter_cache: dict[tuple[str, int], int] = {}


async def get_or_create_filter(cursor, name: str, filter_group_id: int) -> int:
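    """Return the id of the filter `name` inside `filter_group_id`, creating the
    filter, its description and its "add"/"sub" SEO keywords on first use.
    Results are memoised in the module-level filter_cache.
    """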
    key = (name, filter_group_id)
    if key in filter_cache:
        return filter_cache[key]

    await cursor.execute(
        "SELECT filter_id FROM oc_filter_description WHERE name = %s AND filter_group_id = %s",
        (name, filter_group_id)
    )
    result = await cursor.fetchone()

    if result:
        filter_cache[key] = result["filter_id"]
        return result["filter_id"]

    await cursor.execute(
        "INSERT INTO oc_filter (filter_group_id, sort_order) VALUES (%s, %s)",
        (filter_group_id, 0)
    )
    filter_id = cursor.lastrowid

    await cursor.execute(
        "INSERT INTO oc_filter_description (filter_id, language_id, filter_group_id, name) VALUES (%s, 2, %s, %s)",
        (filter_id, filter_group_id, name)
    )

    keyword = translit(name)
    keyword_sub = keyword.replace("f-", "f-not-")

    await cursor.execute(
        "INSERT INTO xpro_filter_seo (filter_id, type, keyword, filter_group_id) VALUES (%s, 'add', %s, %s), (%s, 'sub', %s, %s)",
        (filter_id, keyword, filter_group_id, filter_id, keyword_sub, filter_group_id)
    )

    filter_cache[key] = filter_id
    return filter_id


async def add_filter_categories(cursor, filter_ids: list[int], category_ids: list[int]):
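    """Link every filter id to every category id in oc_category_filter,
    ignoring pairs that already exist."""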
    insert_values = []
    for filter_id in filter_ids:
        for category_id in category_ids:
            insert_values.append((category_id, filter_id))

    for value in insert_values:
        try:
            await cursor.execute(
                "INSERT INTO oc_category_filter (category_id, filter_id) VALUES (%s, %s)",
                value
            )
        except aiomysql.IntegrityError:
            # The (category_id, filter_id) pair already exists; skip the duplicate.
            pass


async def add_product_filter(cursor, filter_ids: list[int], product_id: int):
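    """Link the product to every filter id in oc_product_filter,
    ignoring pairs that already exist."""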
    insert_values = [(product_id, filter_id) for filter_id in filter_ids]

    for value in insert_values:
        try:
            await cursor.execute(
                "INSERT INTO oc_product_filter (product_id, filter_id) VALUES (%s, %s)",
                value
            )
        except aiomysql.IntegrityError:
            # The (product_id, filter_id) pair already exists; skip the duplicate.
            pass



@router.post("/process-filters", tags=["Фільтр"], summary="Запустити обробку фільтрів")
async def start_process_filters(background_tasks: BackgroundTasks):
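    """Kick off process_filters_loop as a FastAPI background task,
    unless a run is already in progress."""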
    if processing_status["running"]:
        return {"status": False, "message": "Обробка вже запущена"}

    background_tasks.add_task(process_filters_loop)
    return {"status": True, "message": "Обробка фільтрів запущена у фоні"}

async def process_filters_loop():
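    """Re-apply filters to every product whose xpro_recived_filter row was
    updated after it was last processed: rebuild the oc_product_filter and
    oc_category_filter links and stamp date_process.
    """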
    processing_status["running"] = True
    processing_status["processed"] = 0
    processing_status["total"] = 0

    conn = await aiomysql.connect(**DB_CONFIG_FILTER)
    cursor = await conn.cursor(aiomysql.DictCursor)

    try:
        await cursor.execute("SELECT * FROM xpro_process_filter ORDER BY process_filter_id DESC LIMIT 1")
        last_process = await cursor.fetchone()
        date = "2000-01-01 00:00:00"
        if last_process and "date_process" in last_process:
            date = last_process["date_process"]

        await cursor.execute("""
            SELECT * FROM xpro_recived_filter
            WHERE date_update > IFNULL(date_process, '2000-01-01 00:00:00')
        """)
        products_result = await cursor.fetchall()

        processing_status["total"] = len(products_result)

        bar = tqdm(total=processing_status["total"], desc="Processing products")
        product_ids = [p["product_id"] for p in products_result]
        if product_ids:
            sql_delete = f"DELETE FROM oc_product_filter WHERE product_id IN ({','.join(['%s'] * len(product_ids))})"
            await cursor.execute(sql_delete, product_ids)

        filter_group_cache = {}
        filter_cache = {}

        for i, product in enumerate(products_result, start=1):
            product_id = product["product_id"]
            filters = json.loads(product["filters"])
            filter_ids = []

            for filter_obj in filters:
                filter_group_name = filter_obj.get("filter_group")
                filter_name = filter_obj.get("filter_name")

                # Skip the entry if either the group name or the filter name is missing.
                if not filter_group_name or not filter_name:
                    logger.warning(f"Skipping a filter without a group or filter name for product_id={product_id}")
                    continue

                fg_key = filter_group_name.lower()
                if fg_key not in filter_group_cache:
                    filter_group_id = await get_or_create_filter_group(cursor, filter_group_name)
                    filter_group_cache[fg_key] = filter_group_id
                else:
                    filter_group_id = filter_group_cache[fg_key]

                f_key = (filter_name.lower(), filter_group_id)
                if f_key not in filter_cache:
                    filter_id = await get_or_create_filter(cursor, filter_name, filter_group_id)
                    filter_cache[f_key] = filter_id
                else:
                    filter_id = filter_cache[f_key]

                filter_ids.append(filter_id)

            sql = "SELECT category_id FROM oc_product_to_category WHERE product_id = %s"
            await cursor.execute(sql, (product_id,))
            categories_result = await cursor.fetchall()
            categories = [row["category_id"] for row in categories_result]

            await add_filter_categories(cursor, filter_ids, categories)
            await add_product_filter(cursor, filter_ids, product_id)

            date_added = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
            await cursor.execute("UPDATE xpro_recived_filter SET date_process = %s WHERE product_id = %s", (date_added, product_id))

            processing_status["processed"] = i
            bar.update(1)
            # print(f"{i} из {processing_status['total']}")

        bar.close()
        await conn.commit()
    finally:
        processing_status["running"] = False
        await cursor.close()
        conn.close()


@router.get("/processing-status", tags=["Фільтр"], summary="Статус обробки фільтрів")
async def get_processing_status():
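    """Return the progress of the background filter processing as counters and a percentage."""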
    return {
        "running": processing_status["running"],
        "processed": processing_status["processed"],
        "total": processing_status["total"],
        "percent": round((processing_status["processed"] / processing_status["total"]) * 100, 2)
        if processing_status["total"] > 0 else 0
    }
