from rest_framework import viewsets
import subprocess
import re
import json
from datetime import datetime, timedelta
from django.http import JsonResponse
from houslyspace.models import *
from extractly.models import AdsManual


# Holds the timestamp of the most recent log activity (used for the 3-minute check)
last_log_time = datetime.now()

def otodom_service_status(request):
    """Return the status and recent logs of otodom.service, with error analysis."""
    global last_log_time  # module-level timestamp used for the log-freshness check

    try:
        # Fetch the service status
        status_output = subprocess.run(
            ["sudo", "systemctl", "is-active", "otodom.service"],
            capture_output=True,
            text=True
        )
        is_active = status_output.stdout.strip()  # "active" / "inactive"

        # Fetch the last 50 log lines
        logs_output = subprocess.run(
            ["sudo", "journalctl", "-u", "otodom.service", "-n", "50", "--no-pager", "--output=short"],
            capture_output=True,
            text=True
        )
        # Drop empty lines so an empty journal doesn't yield [""]
        logs = [line for line in logs_output.stdout.strip().split("\n") if line]

        # Log analysis: look for HTTP error codes
        error_pattern = re.compile(r"\b(403|404|500|502|503|504)\b")  # HTTP error codes of interest
        error_logs = [log for log in logs if error_pattern.search(log)]
        
        # Refresh the last-seen-log timestamp whenever any log lines are present
        if logs:
            last_log_time = datetime.now()

        # Derive a status from the logs
        time_since_last_log = datetime.now() - last_log_time

        if time_since_last_log > timedelta(minutes=3):
            status_emoji = "⚠️"  # Yellow: no logs for over 3 minutes
            status_description = "No new logs for over 3 minutes"
        elif len(error_logs) == len(logs) and logs:  # every fetched log line contains an error
            status_emoji = "❌"  # Red: all log lines are errors
            status_description = "All log lines contain HTTP errors"
        else:
            status_emoji = "✅"  # Green: logs are flowing and not all erroneous
            status_description = "Service is running correctly"

        return JsonResponse({
            "status": is_active,
            "status_emoji": status_emoji,
            "status_description": status_description,
            "logs": logs
        })

    except Exception as e:
        return JsonResponse({"error": str(e)}, status=500)
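
# urls.py (append to urlpatterns); the path and name below are suggestions,
# not part of the original routing:
# path('monitoring/otodom-status/', views.otodom_service_status, name='otodom-status'),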





def check_listing_status(request, listing_id):
    """API do sprawdzenia, czy konkretne ogłoszenie jest aktywne"""
    try:
        # Look up the listing URL by ID
        listing = AdsManual.objects.filter(id=listing_id).first()
        if not listing:
            return JsonResponse({"error": "Listing does not exist"}, status=404)

        url = listing.url

        # Run Scrapy for a single listing
        process = subprocess.run(
            ["scrapy", "crawl", "nonline_single_ad_is_active", "-a", f"url={url}"],
            capture_output=True,
            text=True
        )

        # Collect and parse the Scrapy output; Scrapy logs to stderr by default,
        # so inspect both streams
        output = process.stdout + process.stderr
        if "Ad id" in output and "is not active" in output:
            is_active = False
        else:
            is_active = True

        # Return the listing status
        return JsonResponse({"id": listing_id, "is_active": is_active})

    except Exception as e:
        return JsonResponse({"error": str(e)}, status=500)
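
# urls.py (append to urlpatterns); path and name are suggestions. The
# <int:listing_id> converter matches the listing_id argument above:
# path('listings/<int:listing_id>/status/', views.check_listing_status, name='listing-status'),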






# views.py
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from .models import DataSetSnapshot
from .serializers import DataSetSnapshotSerializer
from django.db.models import Q
from datetime import timedelta

@api_view(['POST'])
def refresh_stats(request):
    # Register the models to snapshot here, e.g. {"model_label": ModelClass};
    # left empty in this file
    models = {
    }

    snapshots = []
    for name, model in models.items():
        total = model.objects.count()
        title_empty = model.objects.filter(Q(title__isnull=True) | Q(title='')).count()
        title_filled = total - title_empty
        original_empty = model.objects.filter(Q(original_image_urls__isnull=True) | Q(original_image_urls='')).count()
        original_filled = total - original_empty
        images_empty = model.objects.filter(Q(images__isnull=True) | Q(images='')).count()
        images_filled = total - images_empty

        snap = DataSetSnapshot.objects.create(
            model_name=name,
            total=total,
            title_filled=title_filled,
            title_empty=title_empty,
            original_filled=original_filled,
            original_empty=original_empty,
            images_filled=images_filled,
            images_empty=images_empty,
        )
        snapshots.append(snap)

    serializer = DataSetSnapshotSerializer(snapshots, many=True)
    return Response(serializer.data, status=status.HTTP_201_CREATED)
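
# models.py: a minimal sketch of the DataSetSnapshot model that the stats views
# assume. Field names are inferred from the create() call above and from the
# created_at ordering used below; field types and max_length are assumptions:
#
# class DataSetSnapshot(models.Model):
#     model_name = models.CharField(max_length=100)
#     total = models.IntegerField(default=0)
#     title_filled = models.IntegerField(default=0)
#     title_empty = models.IntegerField(default=0)
#     original_filled = models.IntegerField(default=0)
#     original_empty = models.IntegerField(default=0)
#     images_filled = models.IntegerField(default=0)
#     images_empty = models.IntegerField(default=0)
#     created_at = models.DateTimeField(auto_now_add=True)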


@api_view(['GET'])
def latest_stats(request):
    # Newest-first within each model; the first snapshot seen per model_name wins
    latest = DataSetSnapshot.objects.all().order_by('model_name', '-created_at')
    result = {}
    for snap in latest:
        if snap.model_name not in result:
            result[snap.model_name] = DataSetSnapshotSerializer(snap).data
    return Response(result.values())


@api_view(['GET'])
def compare_last_two(request):
    from collections import defaultdict

    snapshots = DataSetSnapshot.objects.all().order_by('model_name', '-created_at')
    latest = defaultdict(list)
    for snap in snapshots:
        if len(latest[snap.model_name]) < 2:
            latest[snap.model_name].append(snap)

    result = []
    for model_name, snaps in latest.items():
        if len(snaps) < 2:
            continue
        new, old = snaps[0], snaps[1]
        diff = {
            "model_name": model_name,
            "time_diff": str(new.created_at - old.created_at),
            "title_delta": new.title_filled - old.title_filled,
            "original_delta": new.original_filled - old.original_filled,
            "images_delta": new.images_filled - old.images_filled,
        }
        result.append(diff)
    return Response(result)
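
# urls.py (append to urlpatterns); paths and names below are suggestions:
# path('stats/refresh/', views.refresh_stats, name='refresh-stats'),
# path('stats/latest/', views.latest_stats, name='latest-stats'),
# path('stats/compare/', views.compare_last_two, name='compare-stats'),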














# views.py (API for checking photo consistency)
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
import json
from archive.utils import move_to_archive


@api_view(['POST'])
def check_photos_consistency(request):
    # Register the models to check here, e.g. {"model_label": ModelClass};
    # left empty in this file
    models = {
    }

    total_checked = 0
    total_updated = 0
    details = []

    for model_name, model in models.items():
        updated_ads = 0

        for ad in model.objects.all().only("original_image_urls", "images", "haveAllPhotos"):
            original_count = 0
            processed_count = 0

            # original_image_urls count
            if ad.original_image_urls:
                try:
                    original_list = json.loads(ad.original_image_urls) if isinstance(ad.original_image_urls, str) else ad.original_image_urls
                    if isinstance(original_list, list):
                        original_count = len(original_list)
                except json.JSONDecodeError:
                    continue

            # images count
            if ad.images:
                try:
                    images_list = json.loads(ad.images) if isinstance(ad.images, str) else ad.images
                    if isinstance(images_list, list):
                        processed_count = len(images_list)
                except json.JSONDecodeError:
                    continue

            # Compare
            have_all_photos = original_count == processed_count

            if ad.haveAllPhotos != have_all_photos:
                ad.haveAllPhotos = have_all_photos
                ad.save(update_fields=["haveAllPhotos"])
                updated_ads += 1

        # Log once per model (not once per ad) after the scan completes
        checked_count = model.objects.count()
        PhotoConsistencyLog.objects.create(
            model_name=model_name,
            checked=checked_count,
            updated=updated_ads
        )

        total_checked += checked_count
        total_updated += updated_ads
        details.append({"model": model_name, "checked": checked_count, "updated": updated_ads})

    return Response({
        "total_checked": total_checked,
        "total_updated": total_updated,
        "details": details
    }, status=status.HTTP_200_OK)

# urls.py (append to urlpatterns)
# path('stats/check-photos/', views.check_photos_consistency, name='check-photos'),
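
# models.py: a minimal sketch of the PhotoConsistencyLog model this view assumes.
# Field names are taken from the create() call above; field types, max_length
# and the created_at column are assumptions:
#
# class PhotoConsistencyLog(models.Model):
#     model_name = models.CharField(max_length=100)
#     checked = models.IntegerField(default=0)
#     updated = models.IntegerField(default=0)
#     created_at = models.DateTimeField(auto_now_add=True)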




@api_view(['POST'])
def archive_inactive_listings(request):
    listings = AdsManual.objects.filter(is_active=False)
    count = 0
    for listing in listings:
        move_to_archive(listing.id)
        count += 1

    return Response({
        "archived_count": count,
        "message": f"Zarchiwizowano {count} ogłoszeń."
    }, status=status.HTTP_200_OK)

# urls.py (append to urlpatterns)
# path('stats/archive-inactive/', views.archive_inactive_listings, name='archive-inactive'),




@api_view(['GET'])
def image_link_statistics(request):
    # Register the models to analyze here, e.g. {"model_label": ModelClass};
    # left empty in this file
    models = {
    }

    total_ads = 0
    total_detailed_ads = 0
    total_processed_ads = 0
    total_links_original = 0
    total_links_processed = 0
    details = []

    for model_name, model in models.items():
        ads_count = model.objects.count()
        total_ads += ads_count

        links_original = 0
        links_processed = 0
        detailed_ads_count = 0
        processed_ads_count = 0

        for ad in model.objects.all().only("original_image_urls", "images", "title"):
            has_title = bool(ad.title and ad.title.strip())
            has_original_images = ad.original_image_urls and isinstance(ad.original_image_urls, (list, str))
            has_processed_images = ad.images and isinstance(ad.images, (list, str))

            if has_title and has_original_images:
                detailed_ads_count += 1
            if has_title and has_original_images and has_processed_images:
                processed_ads_count += 1

            if ad.original_image_urls:
                try:
                    original_list = json.loads(ad.original_image_urls) if isinstance(ad.original_image_urls, str) else ad.original_image_urls
                    if isinstance(original_list, list):
                        links_original += len(original_list)
                except json.JSONDecodeError:
                    continue

            if ad.images:
                try:
                    images_list = json.loads(ad.images) if isinstance(ad.images, str) else ad.images
                    if isinstance(images_list, list):
                        links_processed += len(images_list)
                except json.JSONDecodeError:
                    continue

        total_links_original += links_original
        total_links_processed += links_processed
        total_detailed_ads += detailed_ads_count
        total_processed_ads += processed_ads_count

        details.append({
            "model": model_name,
            "ads": ads_count,
            "with_title_and_original": detailed_ads_count,
            "with_full_data": processed_ads_count,
            "links_in_original": links_original,
            "links_in_images": links_processed
        })

    avg_links_original = total_links_original / total_detailed_ads if total_detailed_ads else 0
    avg_links_processed = total_links_processed / total_detailed_ads if total_detailed_ads else 0
    avg_links_original_filled = total_links_original / total_processed_ads if total_processed_ads else 0
    avg_links_processed_filled = total_links_processed / total_processed_ads if total_processed_ads else 0

    return Response({
        "total_ads": total_ads,
        "detailed_ads": total_detailed_ads,
        "fully_processed_ads": total_processed_ads,
        "total_links_original": total_links_original,
        "total_links_processed": total_links_processed,
        "avg_links_original": round(avg_links_original, 2),
        "avg_links_processed": round(avg_links_processed, 2),
        "avg_links_original_filled": round(avg_links_original_processed, 2),
        "avg_links_processed_filled": round(avg_links_processed_processed, 2),
        "details": details
    }, status=status.HTTP_200_OK)
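
# urls.py (append to urlpatterns); path and name are suggestions:
# path('stats/image-links/', views.image_link_statistics, name='image-link-stats'),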
    
    
    
    
    
    
    
    
    
@api_view(['POST'])
def update_source_status(request):

    from archive.utils import move_to_archive
    from django.utils.timezone import now

    action_log = []

    def log(msg):
        print(msg)
        action_log.append(msg)

    # Supported sources
    SOURCES = {
        # "gethome": {
        #     "listings": GetHomeListings,
        #     "status": GethomeUpdateStatus,
        #     "spider": "gethome_update_status",
        #     "path": "/var/www/html/gethome/gethome_scraper"
        # },
        # "otodom": {
        #     "listings": ListingOtodom,
        #     "status": OtodomUpdateStatus,
        #     "spider": "otodom_update_status",
        #     "path": "/var/www/html/otodom/otodom_scraper"
        # },
        # "morizon": {
        #     "listings": ListingMorizon,
        #     "status": MorizonUpdateStatus,
        #     "spider": "otodom_update_status",
        #     "path": "/var/www/html/otodom/otodom_scraper"
        # },
        # "nonline": {
        #     "listings": NonlineListings,
        #     "status": NonlineUpdateStatus,
        #     "spider": "otodom_update_status",
        #     "path": "/var/www/html/otodom/otodom_scraper"
        # }

        # Add more entries as soon as the corresponding models exist
        # "otodom": {...}, "morizon": {...}, ...
    }

    source = request.data.get("source")
    if not source or source not in SOURCES:
        return Response({"error": "Nieprawidłowy lub brakujący 'source'"}, status=400)

    config = SOURCES[source]
    model_listings = config["listings"]
    model_status = config["status"]
    spider = config["spider"]
    spider_path = config["path"]

    def clean():
        count = model_status.objects.count()
        model_status.objects.all().delete()
        log(f"🧹 Usunięto {count} wpisów z {model_status.__name__}")

    def check():
        listings_urls = set(model_listings.objects.values_list("url", flat=True))
        update_status_urls = set(model_status.objects.values_list("url", flat=True))

        new = update_status_urls - listings_urls
        if new:
            log(f"🆕 Dodano {len(new)} nowych ogłoszeń")
            model_listings.objects.bulk_create([
                model_listings(url=u, is_active=True, created_at=now()) for u in new
            ])

        inactive = listings_urls - update_status_urls
        for url in inactive:
            try:
                obj = model_listings.objects.get(url=url)
                move_to_archive(obj.url)
            except model_listings.DoesNotExist:
                log(f"⚠️ Ogłoszenie {url} już nie istnieje")

        log(f"✅ Przeniesiono {len(inactive)} nieaktualnych ogłoszeń do archiwum")

    try:
        log(f"🔁 Starting full update for: {source}")
        clean()
        log(f"🚀 Running scrapy {spider}")
        subprocess.run(["scrapy", "crawl", spider], cwd=spider_path, check=True)
        check()
        clean()
        log("✅ Update finished!")
    except Exception as e:
        log(f"❌ Error: {e}")

    return Response({"source": source, "log": action_log}, status=200)












from collections import defaultdict
import ast
import json
from datetime import datetime
from django.core.exceptions import FieldError
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status


@api_view(['GET'])
def get_unique_listing_values(request):
    # 🔧 Configure the models and the columns to pull unique values from,
    # e.g. {SomeListingModel: ["city", "market"]}; left empty in this file
    tables_and_columns = {
    }

    values_dict = defaultdict(set)

    for model, columns in tables_and_columns.items():
        for column in columns:
            if not column:
                continue
            try:
                unique_values = model.objects.values_list(column, flat=True).distinct()
                for value in unique_values:
                    if isinstance(value, str):
                        cleaned = value.strip()
                        if cleaned.startswith("[") and cleaned.endswith("]"):
                            # The value looks like a serialized list: try to parse it
                            try:
                                items = ast.literal_eval(cleaned)
                                if isinstance(items, list):
                                    for item in items:
                                        values_dict[column].add(str(item).strip())
                            except (ValueError, SyntaxError):
                                # Not a valid Python literal: keep the raw string
                                values_dict[column].add(cleaned)
                        else:
                            values_dict[column].add(cleaned)
                    elif value is not None:
                        values_dict[column].add(str(value))
            except FieldError:
                continue

    # 📁 Save the results to a file (optional)
    data = {key: sorted(values) for key, values in values_dict.items()}
    filename = f"unique_values_{datetime.today().date()}.json"
    with open(filename, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=4, ensure_ascii=False)

    return Response({
        "status": "success",
        "unique_values": data,
        "saved_file": filename
    }, status=status.HTTP_200_OK)
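
# urls.py (append to urlpatterns); path and name are suggestions:
# path('stats/unique-values/', views.get_unique_listing_values, name='unique-values'),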











from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status

@api_view(['POST'])
def merge_listings_to_monitoring(request):
    try:
        # merge_listings_to_network_monitoring()  # the actual merge call is currently disabled
        return Response({"message": "✅ Listings merged successfully."}, status=status.HTTP_200_OK)
    except Exception as e:
        return Response({"error": f"❌ An error occurred: {str(e)}"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)




@api_view(["GET"])
def scraper_health(request):
    statuses = ScraperStatus.objects.all().values("name", "last_run", "success", "message")
    return Response({"scrapers": list(statuses)})


import subprocess

def get_scraper_status(service_name):
    try:
        output = subprocess.check_output(
            ["systemctl", "is-active", service_name],
            stderr=subprocess.STDOUT
        ).decode().strip()

        return {"status": output}
    except subprocess.CalledProcessError as e:
        return {"status": "unknown", "error": e.output.decode().strip()}


@api_view(["GET"])
def scraper_status(request):
    service = request.GET.get("service", "scraper-gethome.service")
    result = get_scraper_status(service)
    return Response(result)
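
# urls.py (append to urlpatterns); paths and names are suggestions:
# path('monitoring/health/', views.scraper_health, name='scraper-health'),
# path('monitoring/status/', views.scraper_status, name='scraper-status'),
#
# Example: GET /monitoring/status/?service=scraper-gethome.service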



# monitoring/views.py
import subprocess
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from houslyspace.models import ScraperStatus
from django.utils.timezone import now


def check_systemctl(service_name):
    try:
        # Use `state` rather than `status` to avoid shadowing rest_framework.status
        state = subprocess.check_output(
            ["systemctl", "is-active", service_name],
            stderr=subprocess.STDOUT
        ).decode().strip()

        return state
    except subprocess.CalledProcessError as e:
        return f"error: {e.output.decode().strip()}"


def get_recent_logs(service_name, limit=10):
    try:
        logs = subprocess.check_output(
            ["journalctl", "-u", service_name, "-n", str(limit), "--no-pager"],
            stderr=subprocess.STDOUT
        ).decode().strip()

        return logs
    except subprocess.CalledProcessError as e:
        return f"error: {e.output.decode().strip()}"


@api_view(["GET"])
def scraper_diagnostics(request):
    service_name = request.GET.get("service", "scraper-gethome.service")
    logical_name = service_name.replace(".service", "").replace("scraper-", "")

    status_systemctl = check_systemctl(service_name)
    logs = get_recent_logs(service_name)

    try:
        scraper = ScraperStatus.objects.get(name=logical_name)
        last_run = scraper.last_run
        success = scraper.success
        message = scraper.message
    except ScraperStatus.DoesNotExist:
        last_run = None
        success = False
        message = "Brak wpisu w bazie"

    return Response({
        "service": service_name,
        "logical_name": logical_name,
        "systemctl_status": status_systemctl,
        "last_run": last_run,
        "success": success,
        "message": message,
        "recent_logs": logs,
        "checked_at": now()
    }, status=status.HTTP_200_OK)
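
# urls.py (append to urlpatterns); path and name are suggestions:
# path('monitoring/diagnostics/', views.scraper_diagnostics, name='scraper-diagnostics'),
#
# Example: GET /monitoring/diagnostics/?service=scraper-gethome.service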










# views.py

from rest_framework.decorators import api_view
from rest_framework.response import Response
from houslyspace.models import ScraperConfig

@api_view(['GET', 'POST'])
def scraper_config_view(request):
    if request.method == 'GET':
        configs = {config.name: config.value for config in ScraperConfig.objects.all()}
        return Response(configs)

    elif request.method == 'POST':
        data = request.data
        for key, value in data.items():
            ScraperConfig.objects.update_or_create(name=key, defaults={"value": str(value)})
        return Response({"status": "updated", "data": data})
