import asyncio
from urllib.parse import urljoin, urlencode

from link_agregator.utils.logger import logger

async def paginate_next(
    page,
    selectors,
    pagination_strategy,
    page_num,
    base_url,
    permutation_url,
    pagination_pattern,
    first_page_pagination,
    section_selector=None,
    next_selector=None
):
    if pagination_strategy == "click":
        return await click_next(page, next_selector)
    elif pagination_strategy == "scroll":
        return await scroll_and_detect(page, section_selector)
    elif pagination_strategy == "url_change":
        return await next_by_url_change(
            page=page,
            page_num=page_num + 1,  # the next page
            base_url=base_url,
            permutation_url=permutation_url,
            pagination_pattern=pagination_pattern,
            first_page_pagination=first_page_pagination,
            extra_params=selectors.get("extra_params"),
            pagination_numbers_selector=selectors.get("pagination_numbers_selector") or selectors.get("total_pages_selector"),
            goto_wait_until=selectors.get("goto_wait_until", "domcontentloaded"),
            goto_timeout=int(selectors.get("goto_timeout", 60000)),
        )
    else:
        logger.error(f"[ERROR] Nieznana strategia paginacji: {pagination_strategy}. Przerywam.")
        return False
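
# A sketch of the `selectors` mapping consumed above for the "url_change"
# strategy. Only keys actually read by paginate_next are shown; the sample
# values are illustrative assumptions, not a confirmed site config:
#
#   selectors = {
#       "extra_params": {"sort": "newest"},              # optional query params
#       "pagination_numbers_selector": ".pagination a",  # visible page numbers
#       "goto_wait_until": "domcontentloaded",
#       "goto_timeout": 60000,
#   }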


async def click_next(page, next_selector, retry=2, wait=1500):
    for attempt in range(retry):
        try:
            # .first avoids Playwright's strict-mode error when the selector
            # happens to match more than one node.
            next_button = page.locator(next_selector).first
            if await next_button.count() == 0:
                logger.info("⛔ No next page; stopping.")
                return False
            await next_button.click()
            await page.wait_for_timeout(wait)
            logger.info(f"➡️ Moving to the next page (attempt {attempt + 1})")
            return True
        except Exception as e:
            logger.warning(f"❌ Błąd podczas klikania next ({attempt + 1}): {e}")
            await asyncio.sleep(2)
    logger.warning("❌ Nie udało się przejść do następnej strony po wszystkich próbach.")
    return False


async def scroll_and_detect(page, item_selector, max_scrolls=10, scroll_step=1500):
    last_count = 0
    for _ in range(max_scrolls):
        await page.mouse.wheel(0, scroll_step)
        await page.wait_for_timeout(1000)
        items = await page.query_selector_all(item_selector)
        new_count = len(items)
        if new_count > last_count:
            logger.info(f"🆕 Found {new_count} items after scrolling.")
            last_count = new_count
        else:
            logger.info("🔚 Done scrolling (no new items).")
            break
    return last_count > 0


def build_page_url_from_permutation(permutation_url, pagination_pattern, page_num, first_page_pagination):
    if page_num == 1:
        return permutation_url.replace("{pagination}", first_page_pagination)
    else:
        return permutation_url.replace("{pagination}", pagination_pattern.replace("{{number}}", str(page_num)))
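
# An illustration of the substitution above, using hypothetical values (not a
# real site config). For page 1 the "{pagination}" placeholder is replaced by
# first_page_pagination directly; for later pages it is replaced by
# pagination_pattern with its double-braced "{{number}}" filled in:
#
#   permutation_url       = "https://example.com/list/{pagination}"
#   pagination_pattern    = "p{{number}}"
#   first_page_pagination = ""
#
#   page 1 -> "https://example.com/list/"
#   page 3 -> "https://example.com/list/p3"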


async def safe_goto(page, url, *, wait_until="domcontentloaded", timeout=60000, referer=None, retries=1):
    """
    Stabilne przejście na URL:
      - od razu lżejszy waitUntil,
      - fallback 1x do waitUntil='commit' przy timeoucie.
    """
    last_err = None
    for attempt in range(retries + 1):
        try:
            logger.info(f"[GOTO] {url} (waitUntil={wait_until}, timeout={timeout}ms, try={attempt+1})")
            return await page.goto(url, wait_until=wait_until, timeout=timeout, referer=referer)
        except Exception as e:
            last_err = e
            logger.warning(f"[GOTO] Timeout/błąd (try={attempt+1}): {e}")
            if attempt < retries:
                # fall back to the lighter 'commit' wait state
                try:
                    logger.info("[GOTO] Fallback → waitUntil=commit")
                    return await page.goto(url, wait_until="commit", timeout=timeout, referer=referer)
                except Exception as e2:
                    last_err = e2
    raise last_err


async def next_by_url_change(
    page,
    page_num,
    base_url,
    permutation_url,
    pagination_pattern,
    first_page_pagination,
    extra_params=None,
    pagination_numbers_selector=None,
    goto_wait_until="domcontentloaded",
    goto_timeout=60000,
):
    # (optional) sanity-check the target page number against the visible pagination numbers
    if pagination_numbers_selector:
        try:
            elements = await page.query_selector_all(pagination_numbers_selector)
            numbers = []
            for el in elements:
                txt = await el.inner_text()
                try:
                    num = int(''.join(filter(str.isdigit, txt)))
                    numbers.append(num)
                except Exception:
                    continue
            logger.info(f"[PAGINATION] Numery stron na stronie: {numbers}")
            if page_num not in numbers and numbers:
                logger.info(f"⛔ Brak numeru {page_num} w paginacji ({pagination_numbers_selector}) — koniec (url_change).")
                return False
        except Exception as e:
            logger.warning(f"[PAGINATION] Nie udało się sprawdzić paginacji: {e}")
            # nie przerywamy — spróbujmy mimo to wejść po URL-u

    next_url = build_page_url_from_permutation(permutation_url, pagination_pattern, page_num, first_page_pagination)
    next_url = urljoin(base_url, next_url)
    if extra_params:
        # Append extra query parameters, using '?' or '&' depending on the URL so far.
        next_url += ("&" if "?" in next_url else "?") + urlencode(extra_params)
    if "{pagination}" in next_url:
        logger.warning(f"[ZABEZPIECZENIE] Próbowałem wejść na niepodstawiony link z '{{pagination}}': {next_url} — przerywam paginację!")
        return False
    if next_url == str(page.url):
        logger.info("⛔ Brak następnej strony — koniec (url_change, url identyczny).")
        return False

    logger.info(f"[URL_CHANGE] Przechodzę do kolejnej strony: {next_url}")
    try:
        await safe_goto(
            page,
            next_url,
            wait_until=goto_wait_until,
            timeout=goto_timeout,
            referer=str(page.url),
            retries=1,
        )
    except Exception as e:
        logger.warning(f"[URL_CHANGE] goto failed for {next_url}: {e}")
        return False

    # short breather; the caller waits for the wrapper/list afterwards anyway
    await page.wait_for_timeout(500)
    return True
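

# Minimal end-to-end sketch of how these helpers are meant to be driven. The
# URLs, placeholders, and selector values below are illustrative assumptions,
# not a real site config; real values come from the caller's site definitions.
if __name__ == "__main__":
    from playwright.async_api import async_playwright

    async def demo():
        async with async_playwright() as p:
            browser = await p.chromium.launch()
            page = await browser.new_page()
            # Open the (hypothetical) first listing page, then paginate until
            # the chosen strategy reports there is no next page.
            await safe_goto(page, "https://example.com/list/")
            page_num = 1
            while await paginate_next(
                page,
                selectors={"goto_wait_until": "domcontentloaded"},
                pagination_strategy="url_change",
                page_num=page_num,
                base_url="https://example.com",
                permutation_url="/list/{pagination}",
                pagination_pattern="p{{number}}",
                first_page_pagination="",
            ):
                page_num += 1
            await browser.close()

    asyncio.run(demo())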
