# link_agregator/tasks.py

import asyncio
from celery import shared_task, chain
from link_agregator.source_managment import scrape_all_sources
from html_agregator.tasks import html_agregator_task
from abstractclass.tasks import clear_cache_task           # <-- cache-clearing task import
from link_agregator.check_active.run_check import run_status_only_checks, run_status_pipeline
from houslyspace.tasks import task_send_inactive_ads_to_cloud  # ⬅ imported at module top


# Chromium launch flags that reduce automation fingerprinting and make the
# browser viable in containers (no sandbox, no /dev/shm dependency).
# NOTE(review): none of these three constants is referenced in this module —
# presumably consumed by the scraper code elsewhere; verify before removing.
PLAYWRIGHT_ARGS = ["--disable-blink-features=AutomationControlled","--no-sandbox","--disable-dev-shm-usage"]
# Browser-context kwargs emulating a regular Polish desktop Chrome user
# (UA string, pl-PL locale/timezone/Accept-Language, common 1366x768 viewport).
CTX_KW = dict(
    user_agent=("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
                "(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"),
    locale="pl-PL",
    timezone_id="Europe/Warsaw",
    viewport={"width": 1366, "height": 768},
    extra_http_headers={"Accept-Language": "pl-PL,pl;q=0.9,en;q=0.8","Upgrade-Insecure-Requests": "1"},
)
# JS injected at page init to mask common headless-automation signals:
# navigator.webdriver, missing window.chrome, empty languages/plugins lists.
STEALTH_INIT_SCRIPT = """
Object.defineProperty(Navigator.prototype, 'webdriver', { get: () => undefined });
window.chrome = window.chrome || { runtime: {} };
Object.defineProperty(navigator, 'languages', { get: () => ['pl-PL','pl'] });
Object.defineProperty(navigator, 'plugins',  { get: () => [1,2,3,4,5] });
"""


@shared_task(name="link_agregator.fetch_all_and_queue_html", queue="link")
def link_agregator_task(*, auto_queue_next=True):
    """Scrape links from all sources, then optionally queue the HTML stage.

    Runs ``scrape_all_sources`` in fetch mode. When *auto_queue_next* is
    true, chains a cache clear (explicitly on the ``default`` queue) with
    the HTML aggregator task (explicitly on the ``html`` queue).

    Returns a status dict. Previously this task always reported
    ``status="ok"`` even when scraping blew up; the error is now captured
    and surfaced in the result, while the downstream chain is still queued
    (best-effort, matching the original control flow).
    """
    error: str | None = None

    async def runner():
        # nonlocal so the sync wrapper can report the failure in its result.
        nonlocal error
        try:
            print("Uruchamiam scrape_all_sources (mode=fetch)")
            await scrape_all_sources(source_id=None, params=None, mode="fetch", headless=True)
        except Exception as e:
            # Best-effort: record the error instead of failing the Celery
            # task, so the chain below still runs (as before).
            print(f"Błąd w link_agregator_task: {e}")
            error = str(e)

    asyncio.run(runner())

    if auto_queue_next:
        chain(
            clear_cache_task.si(
                keys=[],
                patterns=["manual_pipeline_lock:*", "img_upload_lock:*"],
                clear_all=False,
                log_prefix="[link->html] "
            ).set(queue="default"),                     # explicitly routed to default
            html_agregator_task.si(
                enable="true", names=None, include_fetched=False, auto_queue_next=True
            ).set(queue="html")                          # explicitly routed to html
        ).apply_async()

    return {
        "status": "ok" if error is None else "error",
        "error": error,
        "queued_html": bool(auto_queue_next),
    }





@shared_task(name="link_agregator.check_active", queue="check")
def is_active_check_task(
    *, clear_cache_before=False, clear_all=False, clear_keys=None, clear_patterns=None
):
    """Run the "is active" check across all sources, then queue the flagged-ads pipeline.

    When *clear_cache_before* is true, a cache-clear task is queued first on
    the ``default`` queue with the given keys/patterns/clear_all flags.

    Instead of triggering the inactive-ads upload directly, this task queues
    ``check_flagged_ads_pipeline_task`` (on the ``check`` queue), which
    re-verifies flagged ads in batches and starts the upload itself.

    Returns a status dict. Previously this task always reported
    ``status="ok"`` even when the check crashed; the error is now captured
    and surfaced, while the follow-up pipeline is still queued (best-effort,
    matching the original control flow).
    """
    if clear_cache_before:
        clear_cache_task.apply_async(kwargs=dict(
            keys=clear_keys,
            patterns=clear_patterns,
            clear_all=clear_all,
            log_prefix="[is_active_check] "
        ), queue="default")

    error: str | None = None

    async def runner():
        # nonlocal so the sync wrapper can report the failure in its result.
        nonlocal error
        try:
            print("Uruchamiam scrape_all_sources (mode=check)")
            await scrape_all_sources(
                source_id=None, params=None, mode="check", headless=True
            )
        except Exception as e:
            # Best-effort: record the error instead of failing the Celery
            # task, so the pipeline below is still queued (as before).
            print(f"Błąd w is_active_check_task: {e}")
            error = str(e)

    asyncio.run(runner())

    # Queue the "flagged" re-check in pipeline mode rather than sending
    # inactive ads to the cloud directly.
    check_flagged_ads_pipeline_task.apply_async(
        kwargs={
            "batch_size": 150,
            "headless": True,
            "source_id": None,
            "source_name": None,
            "max_batches": None,
            "run_actions": True,
        },
        queue="check",
    )

    return {
        "status": "ok" if error is None else "error",
        "error": error,
        "queued": "check_flagged_ads_pipeline_task",
    }



@shared_task(name="link_agregator.check_flagged_ads", queue="check")
def check_flagged_ads_task(
    limit: int | None = 300,
    headless: bool = True,
    source_id=None,
    source_name: str | None = None,
    run_actions: bool = True,
):
    """Re-check up to *limit* flagged ads, then queue the inactive-ads upload.

    Delegates the actual status checks to ``run_status_only_checks`` and,
    once it returns, queues ``task_send_inactive_ads_to_cloud`` in pipeline
    mode on the ``inactive`` queue. Returns a summary dict with checked and
    error counts taken from the checker's result.
    """
    result = asyncio.run(
        run_status_only_checks(
            limit=limit,
            headless=headless,
            source_id=source_id,
            source_name=source_name,
            run_actions=run_actions,
        )
    )

    # Checking finished — kick off the inactive-ads upload pipeline.
    task_send_inactive_ads_to_cloud.apply_async(
        kwargs={"mode": "pipeline", "limit": None, "timeout": None},
        queue="inactive",
    )

    return {
        "status": "ok",
        **{field: result.get(field, 0) for field in ("checked", "errors")},
        "queued_inactive_pipeline": True,
    }

    
@shared_task(name="link_agregator.check_flagged_ads_pipeline", queue="check")
def check_flagged_ads_pipeline_task(
    batch_size: int = 150,
    headless: bool = True,
    source_id=None,
    source_name: str | None = None,
    max_batches: int | None = None,
    run_actions: bool = True,
):
    """Run the full flagged-ads re-check pipeline in batches, then queue the upload.

    Delegates to ``run_status_pipeline`` (batched checking) and, once the
    whole pipeline completes, queues ``task_send_inactive_ads_to_cloud`` in
    pipeline mode on the ``inactive`` queue. Returns a summary dict with
    batch/checked/error counts from the pipeline result.
    """
    pipeline_kwargs = dict(
        batch_size=batch_size,
        headless=headless,
        source_id=source_id,
        source_name=source_name,
        max_batches=max_batches,
        run_actions=run_actions,
    )
    stats = asyncio.run(run_status_pipeline(**pipeline_kwargs))

    # Whole pipeline finished — kick off the inactive-ads upload pipeline.
    task_send_inactive_ads_to_cloud.apply_async(
        kwargs={"mode": "pipeline", "limit": None, "timeout": None},
        queue="inactive",
    )

    report = {"status": "ok", "mode": "pipeline"}
    for field in ("batches", "checked", "errors"):
        report[field] = stats.get(field, 0)
    report["queued_inactive_pipeline"] = True
    return report
