import json
import os
import datetime
from typing import Dict, Any, Optional, List

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db.models import Q

from extractly.models import SourceManual


class Command(BaseCommand):
    """Two-way sync between ``SourceManual`` rows and the ``Configuration/`` folder.

    ``apply``  reads ``Configuration/<portal>/*.json`` files into the DB.
    ``export`` writes the DB configs out to those JSON files.
    """

    help = (
        "Two-way sync for SourceManual configs with the 'Configuration' folder.\n"
        "Usage: manage.py reload_config_from_folder [apply|export] [--name <portal>[,p2]] [--all] [--only selectors,rules,...] [--dry-run]\n"
        "- apply: files -> DB (update database from JSON files)\n"
        "- export: DB -> files (write current DB configs to JSON files)\n"
        "Folder structure: Configuration/<portal>/{selector.json,rules.json,transferred.json,inactive.json,actions.json}."
    )

    # Canonical filename -> SourceManual field mapping, used by both directions.
    FILE_TO_FIELD = {
        "selector.json": "selectors",
        "rules.json": "rules",
        "transferred.json": "transferred",
        "inactive.json": "inactive",
        "actions.json": "actions",
    }
    # Apply mode additionally accepts "selectors.json" as an alias for "selector.json".
    # NOTE: if both files exist, "selectors.json" wins (dict iteration order).
    APPLY_FILE_TO_FIELD = {
        "selector.json": "selectors",
        "selectors.json": "selectors",
        "rules.json": "rules",
        "transferred.json": "transferred",
        "inactive.json": "inactive",
        "actions.json": "actions",
    }
    ALLOWED_FIELDS = ["selectors", "rules", "transferred", "inactive", "actions"]

    def add_arguments(self, parser):
        parser.add_argument(
            "action",
            nargs="?",
            choices=["apply", "export"],
            help="Operation mode: 'apply' updates DB from files, 'export' writes files from DB. If omitted, defaults to apply (or --export if provided).",
        )
        parser.add_argument(
            "--name",
            "-n",
            dest="names",
            help="Comma-separated portal names to update (matches SourceManual.name or linked SourceNetwork.name).",
            type=str,
        )
        parser.add_argument(
            "--all",
            dest="update_all",
            help="Update all portals found under Configuration/",
            action="store_true",
        )
        parser.add_argument(
            "--dry-run",
            dest="dry_run",
            help="Do not write to DB, only print intended changes.",
            action="store_true",
        )
        parser.add_argument(
            "--export",
            dest="export_to_files",
            help="Export current DB configs to Configuration/<portal> JSON files instead of applying from files to DB.",
            action="store_true",
        )
        parser.add_argument(
            "--overwrite",
            dest="overwrite",
            help="When exporting, overwrite existing JSON files (without this flag, existing files are skipped).",
            action="store_true",
        )
        parser.add_argument(
            "--only",
            dest="only",
            help="Limit fields to process (comma-separated). Allowed: selectors,rules,transferred,inactive,actions",
            type=str,
        )

    def handle(self, *args, **options):
        """Entry point: locate the Configuration folder, validate flags, dispatch."""
        base_dir = getattr(settings, "BASE_DIR", None)
        if not base_dir:
            # Fallback: traverse from this file up to the project root (which holds manage.py)
            base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "..", ".."))

        config_root = os.path.join(base_dir, "Configuration")
        if not os.path.isdir(config_root):
            raise CommandError(f"Configuration folder not found at: {config_root}")

        # Positional action wins; the legacy --export flag only matters when the
        # positional argument is omitted.
        action: Optional[str] = options.get("action")
        if action is None:
            action = "export" if options.get("export_to_files") else "apply"

        only_fields = self._parse_only(options.get("only"))

        if action == "export":
            self._export(config_root, options, only_fields)
        else:
            self._apply(config_root, options, only_fields)

    def _parse_only(self, only_arg: Optional[str]) -> Optional[List[str]]:
        """Parse/validate --only into a lowercase field list, or None for 'all fields'."""
        if not only_arg:
            return None
        only_fields = [f.strip().lower() for f in only_arg.split(",") if f.strip()]
        invalid = [f for f in only_fields if f not in self.ALLOWED_FIELDS]
        if invalid:
            raise CommandError(f"Invalid --only values: {', '.join(invalid)}. Allowed: {', '.join(self.ALLOWED_FIELDS)}")
        return only_fields

    def _parse_names(self, names_arg: Optional[str]) -> List[str]:
        """Split the --name argument into a cleaned list of portal names."""
        if not names_arg:
            raise CommandError("Provide --name <portal>[,portal2] or use --all")
        names = [n.strip() for n in names_arg.split(",") if n.strip()]
        if not names:
            raise CommandError("No valid portal names parsed from --name argument")
        return names

    def _export(self, config_root: str, options: Dict[str, Any], only_fields: Optional[List[str]]) -> None:
        """DB -> files: write each matching SourceManual's configs to JSON files.

        Export reads portals from the DB, so it works even when no folders exist
        yet under Configuration/.  Existing files are skipped unless --overwrite.
        """
        dry_run: bool = options.get("dry_run")
        overwrite: bool = options.get("overwrite")

        if options.get("update_all"):
            qs = SourceManual.objects.all()
        else:
            q = Q()
            for p in self._parse_names(options.get("names")):
                # Match either the manual's own name or its linked network's name.
                q |= Q(name__iexact=p) | Q(source__name__iexact=p)
            qs = SourceManual.objects.filter(q)

        if not qs.exists():
            self.stdout.write(self.style.WARNING("No SourceManual entries found to export."))
            return

        for sm in qs:
            portal_name = (sm.name or (sm.source.name if sm.source else None) or f"portal_{sm.id}")
            portal_dir = os.path.join(config_root, portal_name.lower())
            if dry_run:
                self.stdout.write(self.style.WARNING(f"Would export configs for '{portal_name}' to {portal_dir}"))
            else:
                os.makedirs(portal_dir, exist_ok=True)

            # Empty DB values fall back to {} (or [] for the actions list).
            files_map = {
                fn: (getattr(sm, field) or ([] if field == "actions" else {}))
                for fn, field in self.FILE_TO_FIELD.items()
            }
            if only_fields is not None:
                files_map = {fn: data for fn, data in files_map.items()
                             if self.FILE_TO_FIELD[fn] in only_fields}

            for filename, data in files_map.items():
                path = os.path.join(portal_dir, filename)
                if dry_run:
                    self.stdout.write(self.style.SUCCESS(f"Would write {filename} for '{portal_name}'"))
                    continue
                if os.path.exists(path) and not overwrite:
                    self.stdout.write(self.style.WARNING(
                        f"Skipping existing file {path} (use --overwrite to replace)"))
                    continue
                try:
                    with open(path, "w", encoding="utf-8") as fh:
                        json.dump(data, fh, ensure_ascii=False, indent=2)
                except Exception as e:
                    # Best-effort: report and keep exporting the remaining files.
                    self.stdout.write(self.style.ERROR(f"Failed to write {path}: {e}"))

        self.stdout.write(self.style.SUCCESS("Export complete."))

    def _apply(self, config_root: str, options: Dict[str, Any], only_fields: Optional[List[str]]) -> None:
        """Files -> DB: load each portal folder's JSON files into SourceManual rows."""
        dry_run: bool = options.get("dry_run")

        if options.get("update_all"):
            portals = [d for d in os.listdir(config_root) if os.path.isdir(os.path.join(config_root, d))]
            if not portals:
                self.stdout.write(self.style.WARNING("No portal folders found under Configuration/. Nothing to do."))
                return
        else:
            portals = self._parse_names(options.get("names"))

        total_updated = 0

        for portal in portals:
            portal_dir = os.path.join(config_root, portal)
            if not os.path.isdir(portal_dir):
                self.stdout.write(self.style.WARNING(f"Portal folder missing: {portal_dir}. Skipping."))
                continue

            payloads = self._load_payloads(portal_dir, only_fields)
            if not payloads:
                self.stdout.write(self.style.WARNING(f"No known config files found for portal '{portal}'. Skipping."))
                continue

            # Find matching SourceManual entries (by own name or linked network name).
            qs = SourceManual.objects.filter(Q(name__iexact=portal) | Q(source__name__iexact=portal))
            if not qs.exists():
                self.stdout.write(self.style.WARNING(f"No SourceManual found for portal name '{portal}'. Skipping."))
                continue

            for sm in qs:
                # Diff against the DB so unchanged fields are not rewritten.
                changes = [field for field, new_val in payloads.items()
                           if getattr(sm, field) != new_val]

                if not changes:
                    self.stdout.write(self.style.SUCCESS(
                        f"No changes for {sm.name} (portal '{portal}')."))
                    continue

                self.stdout.write(self.style.WARNING(
                    f"Updating {sm.name} (portal '{portal}') fields: {', '.join(changes)}"))

                # Count records with detected changes in BOTH modes so the
                # dry-run summary reports a meaningful number.
                total_updated += 1
                if dry_run:
                    continue

                # Snapshot the current DB state before mutating it.
                self._backup(config_root, portal, sm)

                for field in changes:
                    setattr(sm, field, payloads[field])
                sm.save(update_fields=list(set(changes)))

        if dry_run:
            self.stdout.write(self.style.SUCCESS(f"Dry run complete. Portals processed: {len(portals)}. Records with changes: {total_updated}"))
        else:
            self.stdout.write(self.style.SUCCESS(f"Update complete. Portals processed: {len(portals)}. Records updated: {total_updated}"))

    def _load_payloads(self, portal_dir: str, only_fields: Optional[List[str]]) -> Dict[str, Any]:
        """Read the known JSON files in *portal_dir* into a field -> payload dict.

        Unreadable or malformed files are reported and skipped rather than
        aborting the whole run.
        """
        payloads: Dict[str, Any] = {}
        for filename, field in self.APPLY_FILE_TO_FIELD.items():
            if only_fields is not None and field not in only_fields:
                continue
            path = os.path.join(portal_dir, filename)
            if not os.path.isfile(path):
                continue
            try:
                with open(path, "r", encoding="utf-8") as fh:
                    payloads[field] = json.load(fh)
            except json.JSONDecodeError as e:
                self.stdout.write(self.style.ERROR(f"JSON parse error in {path}: {e}. Skipping this file."))
            except Exception as e:
                self.stdout.write(self.style.ERROR(f"Error reading {path}: {e}. Skipping this file."))
        return payloads

    def _backup(self, config_root: str, portal: str, sm: "SourceManual") -> None:
        """Write a timestamped JSON snapshot of *sm* under Configuration/_backups.

        Backup failures are reported as warnings only — the update proceeds.
        """
        backup_dir = os.path.join(config_root, "_backups")
        os.makedirs(backup_dir, exist_ok=True)
        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        try:
            backup_payload = {
                "id": getattr(sm, "id", None),
                "name": sm.name,
                "source_name": sm.source.name if sm.source else None,
                "selectors": sm.selectors or {},
                "rules": sm.rules or {},
                "transferred": sm.transferred or {},
                "inactive": sm.inactive or {},
                "actions": sm.actions or [],
            }
            backup_path = os.path.join(backup_dir, f"{portal}_{sm.id}_{timestamp}.json")
            with open(backup_path, "w", encoding="utf-8") as bf:
                json.dump(backup_payload, bf, ensure_ascii=False, indent=2)
        except Exception as e:
            self.stdout.write(self.style.WARNING(f"Backup failed for {sm}: {e}"))