import hashlib
import json
import time
from datetime import timedelta

from django.conf import settings
from django.core.cache import cache

from extractly.models import AdsManual
from houslyspace.models import AdsInactiveSync
from houslyspace.serializers import InactiveAdPayloadSerializer


INACTIVE_CLOUD_URL   = getattr(settings, "ADS_INACTIVE_CLOUD_URL", "https://www.hously.cloud/space/receive-inactive/")
INACTIVE_BATCH_SIZE  = getattr(settings, "ADS_INACTIVE_BATCH_SIZE", 150)
INACTIVE_RETRY_LIMIT = getattr(settings, "ADS_INACTIVE_RETRY_LIMIT", 3)
INACTIVE_BASE_DELAY  = getattr(settings, "ADS_INACTIVE_BASE_DELAY", 0.5)
INACTIVE_TIMEOUT     = getattr(settings, "ADS_INACTIVE_TIMEOUT", 20)

# Lock
INACTIVE_LOCK_TTL = 15 * 60
def _inactive_lock_key():
    return "send_inactive_ads_lock"
def _acquire_lock(key, ttl=INACTIVE_LOCK_TTL): return cache.add(key, "1", ttl)
def _refresh_lock(key): cache.touch(key, INACTIVE_LOCK_TTL)
def _release_lock(key): cache.delete(key)

# normalizacja limitu – zabezpiecza przed None/0/ujemnym
INACTIVE_DEFAULT_LIMIT = INACTIVE_BATCH_SIZE







def _normalize_limit(limit) -> int:
    try:
        limit = int(limit)
    except (TypeError, ValueError):
        limit = 0
    return limit if (limit and limit > 0) else int(INACTIVE_DEFAULT_LIMIT or 150)

def _payload_hash(item: dict) -> str:
    raw = json.dumps({
        "url":            item.get("url"),
        "inactive_date":  item.get("inactive_date"),
        "inactive_reason": item.get("inactive_reason"),
    }, ensure_ascii=False, sort_keys=True)
    return hashlib.sha256(raw.encode("utf-8")).hexdigest()






# importy do filtra SQL:
from django.db.models import Q, F
from django.utils import timezone

INACTIVE_RESEND_EQUAL_TS_AFTER_MIN = 180  # minuty

def _inactive_qs():
    """
    Zwraca tylko:
    - nieaktywne z ustawioną inactive_date i url,
    - które nigdy nie były wysłane (brak inactive_sync)
      LUB mają nowszą wartość inactive_date niż ostatnio wysłana.
    """
    return (
        AdsManual.objects
        .filter(
            is_active=False,
            inactive_date__isnull=False,
        )
        .exclude(url__isnull=True)
        .select_related("inactive_sync")
        .filter(
            Q(inactive_sync__isnull=True) |
            Q(inactive_date__gt=F("inactive_sync__last_inactive_value"))
        )
        .order_by("id")
    )


def take_inactive_batch(limit: int, *, force: bool = False):
    limit = _normalize_limit(limit)
    items = []
    now = timezone.now()
    equal_age = INACTIVE_RESEND_EQUAL_TS_AFTER_MIN * 60  # sekundy

    for ad in _inactive_qs().iterator(chunk_size=1000):
        payload = InactiveAdPayloadSerializer(ad).data
        if not payload.get("url") or not payload.get("inactive_date"):
            continue

        h = _payload_hash(payload)
        sync = getattr(ad, "inactive_sync", None)

        should_send = False
        if force or sync is None:
            should_send = True
        else:
            # 1) value diff (nowa wartość inactive_date) – wstępnie odfiltrowane już w SQL,
            #    ale zostawiamy warunek dla bezpieczeństwa.
            newer_value = (
                ad.inactive_date and
                (not sync.last_inactive_value or ad.inactive_date > sync.last_inactive_value)
            )
            # 2) zmiana hasha payloadu (np. doszła przyczyna)
            hash_changed = (sync.last_payload_hash != h)

            if newer_value or hash_changed:
                should_send = True
            else:
                # 3) ta sama wartość -> throttling wg czasu realnej wysyłki
                equal_val = (sync.last_inactive_value and ad.inactive_date == sync.last_inactive_value)
                sent_enough_time_ago = (
                    sync.last_sent_at and (now - sync.last_sent_at).total_seconds() >= equal_age
                )
                should_send = bool(equal_val and sent_enough_time_ago)

        if should_send:
            items.append((ad, payload, h))
            if len(items) >= limit:
                break

    return items
