# management/commands/send_ads_to_cloud.py

from django.core.management.base import BaseCommand
from django.core.paginator import Paginator
from time import sleep
import requests

from django.db.models import Q
from extractly.models import AdsManual
from extractly.serializers import NetworkMonitoringManualSerializer
from houslyspace.utils.get_secret import get_secret_key

# Cloud ingestion endpoint; the server upserts ads keyed on their URL.
CLOUD_URL   = "https://www.hously.cloud/space/receive-advertisement/"
BATCH_SIZE  = 150  # default number of ads per POST request
RETRY_LIMIT = 3    # attempts per page before giving up on it
BASE_DELAY  = 0.5  # seconds; doubled each retry (exponential backoff)
TIMEOUT     = 40   # default per-request timeout in seconds


class Command(BaseCommand):
    """Management command: push ads to the cloud (see send_advertisements_to_cloud)."""

    # User-facing help text (Polish, kept verbatim).
    help = (
        "Wysyła ogłoszenia do chmury (upsert po URL).\n"
        "Domyślnie tylko rekordy isSendToMainServer=False i z niepustym images.\n"
        "Użyj --force, aby wysłać wszystko jak leci."
    )

    def add_arguments(self, parser):
        """Register the CLI flags understood by this command."""
        parser.add_argument(
            "--force",
            action="store_true",
            help="Wyślij WSZYSTKO (ignoruje isSendToMainServer i puste images)."
        )
        parser.add_argument(
            "--batch-size",
            type=int,
            default=BATCH_SIZE,
            help=f"Rozmiar paczki (domyślnie: {BATCH_SIZE})."
        )
        parser.add_argument(
            "--timeout",
            type=int,
            default=TIMEOUT,
            help=f"Timeout requestu w sekundach (domyślnie: {TIMEOUT})."
        )

    def handle(self, *args, **kwargs):
        """Entry point: delegate to the sender with the parsed options."""
        send_advertisements_to_cloud(
            force=kwargs.get("force", False),
            batch_size=int(kwargs.get("batch_size", BATCH_SIZE)),
            timeout=int(kwargs.get("timeout", TIMEOUT)),
        )
        self.stdout.write(self.style.SUCCESS("Done."))


def _extract_failed_urls_from_207(response_json):
    failed = set()
    try:
        for e in (response_json or {}).get("errors", []):
            u = e.get("url")
            if u:
                failed.add(u)
    except Exception:
        pass
    return failed


def _mark_sent_by_urls(urls: set[str]):
    """Flag the ads with the given URLs as already delivered to the main server."""
    if urls:
        AdsManual.objects.filter(url__in=list(urls)).update(isSendToMainServer=True)


def _base_queryset(force: bool):
    """
    Build the queryset of ads eligible for upload, ordered by id.

    force=True: every record, no filtering (the caller still drops rows
    without a URL at serialization time).

    Default: records not yet sent (isSendToMainServer=False), excluding
    those that are BOTH inactive and have empty images.

    NOTE(review): the command help and the original comments say the default
    should skip *all* records with empty images, but the exclude below only
    drops them when is_active=False as well — active rows with empty images
    are still sent. Confirm which behavior is intended.
    """
    if force:
        return AdsManual.objects.all().order_by("id")

    # "Empty" images covers NULL, '', [] and {} — i.e. both text- and
    # JSON-typed representations of "no images".
    empty_images = (
        Q(images__isnull=True) |
        Q(images__exact='') |
        Q(images__exact=[]) |
        Q(images__exact={})
    )

    return (
        AdsManual.objects
        .filter(isSendToMainServer=False)
        .exclude(Q(is_active=False) & empty_images)
        .order_by("id")
    )


def send_advertisements_to_cloud(*, force: bool = False, batch_size: int = BATCH_SIZE, timeout: int = TIMEOUT):
    """
    Upload ads to the cloud endpoint page by page, with retry + backoff.

    Args:
        force: send everything, ignoring the sent-flag / images filter.
        batch_size: number of ads per POST request.
        timeout: per-request timeout in seconds.

    Successfully delivered records are marked isSendToMainServer=True;
    HTTP 400 "already exists" responses are treated as delivered, and
    207 Multi-Status marks only the URLs the server did not reject.
    """
    secret_key = get_secret_key()
    print(f"[cloud] Using Authorization key: {secret_key[:6]}***")
    qs = _base_queryset(force=force)

    # BUGFIX: snapshot the matching PKs before sending anything. Paginating
    # the live queryset is wrong here: each Paginator.page() re-runs the
    # query with LIMIT/OFFSET, and marking a page as sent removes those rows
    # from the isSendToMainServer=False filter — so the offsets shift and
    # roughly every other batch would be silently skipped.
    ids = list(qs.values_list("id", flat=True))

    total = len(ids)
    if total == 0:
        print("[cloud] No advertisements to send.")
        return

    paginator = Paginator(ids, batch_size)
    total_pages = paginator.num_pages
    print(f"[cloud] Will send {total} ads in {total_pages} pages (batch={batch_size}, force={force}).")

    headers = {"Authorization": secret_key, "Content-Type": "application/json"}

    for page_num in paginator.page_range:
        page_ids = list(paginator.page(page_num).object_list)
        batch_qs = AdsManual.objects.filter(id__in=page_ids).order_by("id")
        serialized = NetworkMonitoringManualSerializer(batch_qs, many=True).data

        # The endpoint upserts by URL, so records without one are useless.
        serialized = [it for it in serialized if it.get("url")]
        if not serialized:
            print(f"[cloud] Page {page_num}/{total_pages}: no valid items with URL – skipping.")
            continue

        page_urls = {it["url"] for it in serialized}

        attempt = 0
        while attempt < RETRY_LIMIT:
            attempt += 1
            try:
                resp = requests.post(CLOUD_URL, json={"data": serialized}, headers=headers, timeout=timeout)

                if resp.status_code in (200, 201):
                    # Full success: every item in the page was accepted.
                    _mark_sent_by_urls(page_urls)
                    print(f"[cloud] Page {page_num}/{total_pages} OK (all {len(page_urls)}).")
                    break

                elif resp.status_code == 207:
                    # Multi-Status: mark only what the server did not reject.
                    payload = {}
                    try:
                        payload = resp.json()
                    except ValueError:
                        # Non-JSON 207 body: treat as "no per-item errors reported".
                        pass
                    failed = _extract_failed_urls_from_207(payload)
                    succeeded = page_urls - failed
                    if succeeded:
                        _mark_sent_by_urls(succeeded)
                    print(f"[cloud] Page {page_num}/{total_pages} PARTIAL: success={len(succeeded)}, failed={len(failed)}")
                    break

                elif resp.status_code == 400 and "already exists" in (resp.text or "").lower():
                    # Duplicates already live in the cloud — treat as delivered.
                    _mark_sent_by_urls(page_urls)
                    print(f"[cloud] Page {page_num}/{total_pages} DUPLICATES -> marked as sent ({len(page_urls)}).")
                    break

                else:
                    delay = BASE_DELAY * (2 ** (attempt - 1))
                    print(f"[cloud] Page {page_num}/{total_pages} attempt {attempt}/{RETRY_LIMIT} "
                          f"HTTP {resp.status_code}. Retrying in {delay:.1f}s…")
                    if attempt < RETRY_LIMIT:  # no point sleeping after the last attempt
                        sleep(delay)

            except requests.RequestException as e:
                delay = BASE_DELAY * (2 ** (attempt - 1))
                print(f"[cloud] Page {page_num}/{total_pages} attempt {attempt}/{RETRY_LIMIT} "
                      f"NETWORK error: {e}. Retrying in {delay:.1f}s…")
                if attempt < RETRY_LIMIT:
                    sleep(delay)
        else:
            # while exhausted without break: every retry failed for this page.
            print(f"[cloud] Page {page_num}/{total_pages} FAILED after {RETRY_LIMIT} retries.")

        # Gentle pacing between pages so we don't hammer the endpoint.
        sleep(0.1)
