# main_downloader.py — entry point: parses a Rentry page and downloads the linked items.
import os
import time
import sys
from urllib.parse import urlparse
import concurrent.futures
import threading
import traceback
import shutil
import signal
import re
import requests

# Import project modules
import config_manager
import logger
import parser
import selenium_setup
import downloader
import selenium_handlers
import console_ui

# --- Constants ---
# Hosts whose download pages need a real browser (Selenium) to resolve links.
SELENIUM_HOSTNAMES = {"mega.nz", "mediafire.com", "pixeldrain.com"}

# --- Global State ---
# status_lock guards the four shared progress counters below; workers update
# them under the lock and the UI reads the same values.
status_lock = threading.Lock()
total_items_overall = 0
completed_items_overall = 0
failed_items_overall = 0
total_downloaded_size_bytes = 0
# Set once by the signal handler (SIGINT/SIGTERM); workers poll it to stop
# gracefully. It is also passed into the downloader functions as an argument.
shutdown_requested = threading.Event()
# Pool of (driver, temp_download_dir) tuples shared by Selenium workers,
# guarded by driver_lock.
selenium_drivers_pool = []
driver_lock = threading.Lock()

# --- Global UI instance ---
# Created in main() only when stdout/stderr is a TTY; None otherwise.
ui: console_ui.ConsoleUI | None = None


# --- Signal Handler ---
def signal_handler(signum, frame):
    """Handle SIGINT/SIGTERM by flagging a graceful shutdown exactly once.

    Subsequent signals are ignored so the warning is not logged repeatedly
    while workers finish their current tasks.
    """
    # Access global shutdown_requested
    global shutdown_requested
    if shutdown_requested.is_set():
        return
    logger.app_logger.warning(
        "! Shutdown requested (Signal received). Finishing current tasks..."
    )
    print("\n! Shutdown requested. Finishing current tasks, please wait...")
    shutdown_requested.set()


# Register graceful-shutdown handling for Ctrl+C (SIGINT) and kill (SIGTERM)
# at import time, before any worker threads start.
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)


# --- Helper Functions ---
def needs_selenium(url):
    """Return True if *url* belongs to a host that requires Selenium.

    pixeldrain.com only needs Selenium for file (/u/) and list (/l/) pages;
    any host in SELENIUM_HOSTNAMES (or a subdomain of one) always does.
    Returns False for unparseable URLs or URLs without a hostname.
    """
    try:
        parts = urlparse(url)
        host = (parts.hostname or "").lower()
        if not host:
            return False
        path = parts.path.lower()
        if host == "pixeldrain.com" and (path.startswith("/u/") or path.startswith("/l/")):
            return True
        return any(
            host == sel_host or host.endswith(f".{sel_host}")
            for sel_host in SELENIUM_HOSTNAMES
        )
    except Exception as e:
        logger.app_logger.error(f"Error checking URL {url} for Selenium need: {e}")
        return False


def sanitize_hostname(hostname):
    """Make a hostname safe for use in filesystem paths and log filenames.

    Replaces every character outside [a-zA-Z0-9._-] with '_', trims leading
    and trailing spaces/dots, collapses runs of dots, wraps the reserved
    names '.' and '..' in underscores, and caps the result at 100 chars.
    Falsy input yields the placeholder 'unknown_host'.
    """
    if not hostname:
        return "unknown_host"
    cleaned = re.sub(r"[^a-zA-Z0-9._-]", "_", hostname).strip(" .")
    cleaned = re.sub(r"\.+", ".", cleaned)
    if cleaned in (".", ".."):
        cleaned = f"_{cleaned}_"
    # Slicing is a no-op when the name is already <= 100 characters.
    return cleaned[:100]


# --- Worker Function ---
def process_item(
    item_data,
    category_dir,
    config,
    dynamic_success_log,
    dynamic_failure_log,
    shared_driver_pool,
    shared_driver_lock,
):
    """
    Download one queue item and record its outcome.

    Chooses direct HTTP or Selenium based on the URL host, borrows a driver
    from the shared pool when Selenium is needed, and updates the global
    progress counters and per-run log files under status_lock.

    Args:
        item_data: (original_name, url, desired_filename_base) tuple.
        category_dir: Destination directory for this item's category.
        config: Loaded application configuration.
        dynamic_success_log: Path of the per-run success log file.
        dynamic_failure_log: Path of the per-run failure log file.
        shared_driver_pool: List acting as a pool of (driver, temp_dir) tuples.
        shared_driver_lock: Lock guarding shared_driver_pool.

    Returns:
        (truly_successful, downloaded_size_bytes). Duplicate skips count as
        completed in the counters but return False here; the caller currently
        ignores the return value.
    """
    # Access globals needed
    global completed_items_overall, failed_items_overall, total_downloaded_size_bytes, ui, shutdown_requested

    original_name, url, desired_filename_base = item_data
    item_info = {
        "name": original_name,
        "url": url,
        "base_filename": desired_filename_base,
    }
    thread_name = threading.current_thread().name

    logger.app_logger.info(f"[{thread_name}] Processing: {original_name} ({url})")

    success = False
    final_path_or_status = None
    downloaded_size = 0
    status_reason = "Download failed"
    driver_tuple_used = None  # (driver, temp_dir) if a Selenium driver is borrowed

    try:
        # Check shutdown at the very beginning
        if shutdown_requested.is_set():
            status_reason = "Skipped due to shutdown request"
            logger.app_logger.warning(
                f"[{thread_name}] {status_reason}: {original_name}"
            )
            return False, 0

        use_sel = needs_selenium(url)

        # Acquire driver if needed (includes shutdown check).
        # Polls the pool every 0.5s until a driver frees up or shutdown is
        # requested; emits a "still waiting" debug line at most once per 60s.
        if use_sel:
            acquired = False
            wait_start_time = time.time()
            while not acquired and not shutdown_requested.is_set():
                with shared_driver_lock:
                    if shared_driver_pool:
                        driver_tuple_used = shared_driver_pool.pop(0)
                        acquired = True
                        logger.app_logger.debug(
                            f"[{thread_name}] Acquired Selenium driver {driver_tuple_used[1]}"
                        )
                if not acquired:
                    if time.time() - wait_start_time > 60:
                        logger.app_logger.debug(
                            f"[{thread_name}] Waiting for Selenium driver for {original_name}..."
                        )
                        wait_start_time = time.time()
                    time.sleep(0.5)
            if not acquired:
                # The wait loop only exits unacquired when shutdown was set.
                status_reason = "Skipped: No Selenium driver acquired (shutdown likely)"
                logger.app_logger.warning(f"[{thread_name}] {status_reason}")
                return False, 0

        # --- Perform Download ---
        if use_sel:
            driver, selenium_dl_dir_actual = driver_tuple_used
            logger.app_logger.debug(
                f"[{thread_name}] Using Selenium for {original_name} (Worker Temp Dir: {selenium_dl_dir_actual})"
            )
            # Pick the site-specific handler by hostname substring.
            hostname = urlparse(url).hostname.lower()
            handler_func = None
            if "pixeldrain" in hostname:
                handler_func = selenium_handlers.handle_pixeldrain
            elif "mega.nz" in hostname:
                handler_func = selenium_handlers.handle_mega
            elif "mediafire" in hostname:
                handler_func = selenium_handlers.handle_mediafire

            if handler_func:
                # <<< Pass shutdown_requested event >>>
                success, final_path_or_status, downloaded_size, reason = (
                    downloader.download_with_selenium(
                        driver,
                        url,
                        category_dir,
                        desired_filename_base,
                        handler_func,
                        config,
                        selenium_dl_dir_actual,
                        None,
                        item_info,
                        shutdown_event=shutdown_requested,  # Pass event object
                    )
                )
                status_reason = reason
            else:
                # needs_selenium() matched but no handler exists for the host.
                status_reason = f"No specific Selenium handler found for {hostname}"
                logger.app_logger.error(f"[{thread_name}] {status_reason}")
                success = False
                final_path_or_status = None
                downloaded_size = 0
        else:  # Direct Download
            logger.app_logger.debug(
                f"[{thread_name}] Using direct download for {original_name}"
            )
            # <<< Pass shutdown_requested event >>>
            success, final_path_or_status, downloaded_size, reason = (
                downloader.download_direct_file(
                    url,
                    category_dir,
                    desired_filename_base,
                    config,
                    None,
                    shutdown_event=shutdown_requested,  # Pass event object
                )
            )
            status_reason = reason

        # --- Update Overall Status and Log Final Outcome ---
        with status_lock:
            # "DUPLICATE_SKIPPED" is a sentinel status string, not a path.
            is_duplicate_skipped = (
                success and final_path_or_status == "DUPLICATE_SKIPPED"
            )
            # A real success must have produced a file that exists on disk.
            is_truly_successful = (
                success
                and isinstance(final_path_or_status, str)
                and os.path.exists(final_path_or_status)
            )

            if is_truly_successful:
                completed_items_overall += 1
                if isinstance(downloaded_size, (int, float)) and downloaded_size > 0:
                    total_downloaded_size_bytes += downloaded_size
                else:
                    downloaded_size = 0
                final_sanitized_filename = os.path.basename(final_path_or_status)
                size_str = downloader._format_size(downloaded_size)
                logger.log_success(
                    dynamic_success_log, final_sanitized_filename, size_str
                )
                # Remove any stale failure entry for this item (it may have
                # failed on a previous run and succeeded now).
                log_entry_failure_check = (
                    f"Name: {item_info.get('name', 'N/A')} | URL: {url}"
                )
                logger.remove_from_log_file(
                    dynamic_failure_log, log_entry_failure_check, is_success_log=False
                )
                logger.app_logger.info(
                    f"[{thread_name}] FINAL STATUS: SUCCESS - {original_name} -> {final_sanitized_filename} ({size_str})"
                )

            elif is_duplicate_skipped:
                # Duplicates still count toward completed progress.
                completed_items_overall += 1
                logger.app_logger.info(
                    f"[{thread_name}] FINAL STATUS: SKIPPED (Duplicate) - {original_name} - Reason: {status_reason}"
                )
                log_entry_failure_check = (
                    f"Name: {item_info.get('name', 'N/A')} | URL: {url}"
                )
                logger.remove_from_log_file(
                    dynamic_failure_log, log_entry_failure_check, is_success_log=False
                )

            else:  # Download failed (and wasn't skipped at start)
                # Shutdown-skips are not recorded as failures so they can be
                # retried on the next run without polluting the failure log.
                if status_reason != "Skipped due to shutdown request":
                    failed_items_overall += 1
                    logger.log_failure(
                        dynamic_failure_log, item_info, reason=status_reason
                    )
                    logger.app_logger.error(
                        f"[{thread_name}] FINAL STATUS: FAILURE - {original_name} - Reason: {status_reason}"
                    )

            if ui:
                ui.update_progress(completed_items_overall, failed_items_overall)

        return is_truly_successful, downloaded_size

    except Exception as e:
        # Catch-all so one bad item never kills the worker thread.
        status_reason = f"Unhandled worker error: {e}"
        logger.app_logger.error(f"[{thread_name}] {status_reason}", exc_info=True)
        logger.log_failure(dynamic_failure_log, item_info, reason=status_reason)
        with status_lock:
            failed_items_overall += 1
            if ui:
                ui.update_progress(completed_items_overall, failed_items_overall)
        return False, 0
    finally:
        # Always return a borrowed driver to the pool, even on error/skip.
        if driver_tuple_used:
            with shared_driver_lock:
                shared_driver_pool.append(driver_tuple_used)
                logger.app_logger.debug(
                    f"[{thread_name}] Released Selenium driver {driver_tuple_used[1]}"
                )


# --- Main Execution ---
# main() handles URL input, log parsing, queue building, parallel execution,
# Selenium cleanup, and the final summary.
def main():
    """
    Entry point: prompt for a Rentry URL, build a download queue from the
    page (skipping previously successful items and retrying failed ones,
    with direct downloads prioritized over Selenium ones), run the queue on
    a thread pool, clean up Selenium drivers, and print a summary.

    Exits with status 0 when no item failed this run, 1 otherwise.
    """
    global ui, total_items_overall, completed_items_overall, failed_items_overall, total_downloaded_size_bytes, selenium_drivers_pool, driver_lock, shutdown_requested

    start_time_main = time.time()
    config = config_manager.load_config()
    logger.setup_logging(config)
    logger.app_logger.info("=" * 20 + " Application Start " + "=" * 20)

    # --- URL Input ---
    # Loop until a valid https rentry.org / rentry.co URL is entered; the
    # page slug names the output directory and the per-run log files.
    while True:
        rentry_url = input(
            "Enter the Rentry URL to process (e.g., https://rentry.org/xxxx): "
        ).strip()
        parsed_uri = urlparse(rentry_url)
        if (
            parsed_uri.scheme == "https"
            and parsed_uri.netloc in ["rentry.org", "rentry.co"]
            and parsed_uri.path
            and len(parsed_uri.path) > 1
        ):
            url_path_part = parsed_uri.path.lstrip("/")
            url_base_name = sanitize_hostname(url_path_part)
            if not url_base_name:
                url_base_name = "rentry_download"
            output_dir_base = url_base_name
            dynamic_success_log = f"{url_base_name}_successful.log"
            dynamic_failure_log = f"{url_base_name}_failed.log"
            logger.app_logger.info(f"Using Base Output Directory: '{output_dir_base}'")
            logger.app_logger.info(f"Using Success Log: '{dynamic_success_log}'")
            logger.app_logger.info(f"Using Failure Log: '{dynamic_failure_log}'")
            break
        else:
            print("Invalid URL. Please enter a valid Rentry.org or Rentry.co URL.")

    # --- Load Logs ---
    # Previous successes drive the skip check; previous failures drive retries.
    logger.app_logger.info(f"Loading previous logs...")
    initial_successful_filenames_only = logger.load_log_file(
        dynamic_success_log, is_success_log=True
    )
    initial_failed_log_entries_full = logger.load_log_file(
        dynamic_failure_log, is_success_log=False
    )
    initial_failed_items_map = {}  # URL -> Name
    for line in initial_failed_log_entries_full:
        # Failure-log line format: "Name: <name> | URL: <url>[ | Reason: ...]"
        match = re.match(r"^Name: (.*?) \| URL: (.*?)( \| Reason:.*)?$", line)
        if match:
            name, url = match.group(1).strip(), match.group(2).strip()
            if url not in initial_failed_items_map:
                initial_failed_items_map[url] = name
    logger.app_logger.info(
        f"Found {len(initial_successful_filenames_only)} previously successful files."
    )
    logger.app_logger.info(
        f"Found {len(initial_failed_items_map)} previously failed items to retry."
    )
    print(
        f"INFO: Loaded {len(initial_successful_filenames_only)} successful and {len(initial_failed_items_map)} failed previous items to determine skips/retries."
    )

    # --- Fetch and Parse ---
    headers = {"User-Agent": config.get("Misc", "user_agent")}
    html_content = parser.fetch_rentry_html(rentry_url, headers)
    if not html_content:
        sys.exit("Failed to fetch Rentry page content.")
    categorized_items_from_html = parser.parse_rentry_items(html_content, rentry_url)
    if categorized_items_from_html is None:
        sys.exit("Failed to parse items from Rentry page.")
    if not categorized_items_from_html:
        logger.app_logger.warning("Parser returned empty dictionary.")

    # --- Build Processing Queue (Prioritizing Direct, Using correct skip logic) ---
    items_to_retry_direct, items_to_retry_selenium = [], []
    items_to_process_new_direct, items_to_process_new_selenium = [], []
    skipped_count, processed_urls = 0, set()
    failed_identifiers_to_remove_from_log = set()

    logger.app_logger.info("Building processing queue with Direct DL priority...")
    # Index every parsed item by URL for O(1) lookup during retry matching.
    all_html_items_map = {}
    for cat, items in categorized_items_from_html.items():
        for item_tuple in items:
            all_html_items_map[item_tuple[1]] = (item_tuple, cat)
    # Process Failed Items (Retries): only retry URLs still present on the
    # page; stale failure-log entries are removed below.
    for failed_url, logged_name in initial_failed_items_map.items():
        processed_urls.add(failed_url)
        if failed_url in all_html_items_map:
            item_tuple, category = all_html_items_map[failed_url]
            name_to_use = item_tuple[0] if item_tuple[0] else logged_name
            base_filename_to_use = parser.sanitize_filename(name_to_use)
            final_item_tuple = (name_to_use, failed_url, base_filename_to_use)
            if needs_selenium(failed_url):
                items_to_retry_selenium.append((final_item_tuple, category))
            else:
                items_to_retry_direct.append((final_item_tuple, category))
        else:
            identifier = (
                f"Name: {logged_name} | URL: {failed_url}"  # Original format identifier
            )
            failed_identifiers_to_remove_from_log.add(identifier)
    if failed_identifiers_to_remove_from_log:
        logger.app_logger.info(
            f"Updating '{dynamic_failure_log}' to remove {len(failed_identifiers_to_remove_from_log)} outdated entries..."
        )
        for identifier in list(failed_identifiers_to_remove_from_log):
            removed = logger.remove_from_log_file(
                dynamic_failure_log, identifier, is_success_log=False
            )
    logger.app_logger.info(f"Items to retry (Direct): {len(items_to_retry_direct)}")
    logger.app_logger.info(f"Items to retry (Selenium): {len(items_to_retry_selenium)}")
    # Process New Items
    for category, items in categorized_items_from_html.items():
        for item_tuple in items:  # (item_name, url, desired_filename_base)
            url = item_tuple[1]
            sanitized_base_name = item_tuple[2]
            if url in processed_urls:
                continue
            processed_urls.add(url)
            # Use Regex Check for successful items (handles suffixes):
            # matches "<base>", "<base>_2", "<base>.ext", "<base>_2.ext", etc.
            pattern_str = rf"^{re.escape(sanitized_base_name)}(?:_\d+)?(\..+)?$"
            try:
                success_pattern = re.compile(pattern_str, re.IGNORECASE)
                is_successful = any(
                    success_pattern.match(s_file)
                    for s_file in initial_successful_filenames_only
                )
            except re.error as re_err:
                logger.app_logger.warning(
                    f"Regex error checking success for '{sanitized_base_name}': {re_err}. Skipping check."
                )
                is_successful = False

            if is_successful:
                skipped_count += 1
                # logger.app_logger.debug(f"Skipping '{item_tuple[0]}' (found matching pattern '{pattern_str}' in success log).")
            else:
                if needs_selenium(url):
                    items_to_process_new_selenium.append((item_tuple, category))
                else:
                    items_to_process_new_direct.append((item_tuple, category))
    logger.app_logger.info(
        f"New items to process (Direct): {len(items_to_process_new_direct)}"
    )
    logger.app_logger.info(
        f"New items to process (Selenium): {len(items_to_process_new_selenium)}"
    )
    logger.app_logger.info(
        f"Skipped {skipped_count} items potentially already in success log."
    )
    # Combine lists prioritizing direct
    processing_queue = (
        items_to_retry_direct
        + items_to_process_new_direct
        + items_to_retry_selenium
        + items_to_process_new_selenium
    )

    total_items_overall = len(processing_queue)
    if not processing_queue:
        logger.app_logger.info("No items to download.")
        print("\nNo items.")
        sys.exit(0)
    logger.app_logger.info(f"Total items to attempt this run: {total_items_overall}")

    # --- Determine Max Workers & Setup Selenium ---
    # Only spin up Selenium drivers when the queue actually contains items
    # that need them and the config allows at least one Selenium worker.
    max_selenium_workers_config = config.getint(
        "Parallel", "max_selenium_workers", fallback=1
    )
    max_direct_downloads = config.getint("Parallel", "max_direct_downloads", fallback=5)
    selenium_needed_in_queue = any(
        needs_selenium(item[0][1]) for item in processing_queue
    )
    selenium_drivers_pool.clear()
    active_selenium_workers = 0
    if selenium_needed_in_queue and max_selenium_workers_config > 0:
        logger.app_logger.info(
            f"Setting up {max_selenium_workers_config} Selenium workers..."
        )
        selenium_temp_base = os.path.join(output_dir_base, "selenium_temp")
        for i in range(max_selenium_workers_config):
            if shutdown_requested.is_set():
                break
            driver, download_dir = selenium_setup.setup_selenium_driver(
                config, selenium_temp_base, worker_id=i, ui=None
            )
            if driver and download_dir:
                selenium_drivers_pool.append((driver, download_dir))
                active_selenium_workers += 1
            else:
                logger.app_logger.error(f"Failed to set up Selenium driver {i+1}.")
        if not selenium_drivers_pool:
            logger.app_logger.critical("No Selenium drivers setup.")
            active_selenium_workers = 0
    elif selenium_needed_in_queue:
        logger.app_logger.warning("Selenium needed but max_selenium_workers <= 0.")
        active_selenium_workers = 0
    # Thread pool size covers direct-download slots plus one per live driver.
    max_workers = max_direct_downloads + active_selenium_workers
    logger.app_logger.info(
        f"Using up to {max_workers} parallel workers (Direct: {max_direct_downloads}, Selenium: {active_selenium_workers})."
    )

    # --- Initialize Simple UI ---
    # Rich console UI only when attached to a terminal.
    if sys.stdout.isatty() or sys.stderr.isatty():
        ui = console_ui.ConsoleUI(total_items_overall)
    else:
        logger.app_logger.warning("Not a TTY. Rich UI disabled.")
        ui = None

    # --- Start UI and Execute Tasks ---
    if ui:
        ui.start()
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=max_workers, thread_name_prefix="Worker"
    ) as executor:
        futures = []
        for item_tuple, category in processing_queue:
            if shutdown_requested.is_set():
                break
            sanitized_category = parser.sanitize_filename(category) or "Uncategorized"
            category_dir = os.path.join(output_dir_base, sanitized_category)
            if not os.path.exists(category_dir):
                try:
                    os.makedirs(category_dir)
                    logger.app_logger.info(f"Created dir: {category_dir}")
                except OSError as e:
                    # Directory creation failed: record the item as failed
                    # without ever submitting it to the pool.
                    logger.app_logger.error(
                        f"Cannot create dir {category_dir}: {e}. Skipping {item_tuple[0]}."
                    )
                    logger.log_failure(
                        dynamic_failure_log,
                        {
                            "name": item_tuple[0],
                            "url": item_tuple[1],
                            "base_filename": item_tuple[2],
                        },
                        reason=f"Failed to create dir: {e}",
                    )
                    with status_lock:
                        failed_items_overall += 1
                    if ui:
                        ui.update_progress(
                            completed_items_overall, failed_items_overall
                        )
                    continue

            future = executor.submit(
                process_item,
                item_tuple,
                category_dir,
                config,
                dynamic_success_log,
                dynamic_failure_log,
                selenium_drivers_pool,
                driver_lock,
            )
            futures.append(future)

        logger.app_logger.info(f"Submitted {len(futures)} tasks to executor.")
        try:
            # Drain completions; after each one, honor a pending shutdown by
            # cancelling every not-yet-started task.
            for future in concurrent.futures.as_completed(futures):
                try:
                    future.result()
                except concurrent.futures.CancelledError:
                    logger.app_logger.warning(f"A task was cancelled.")
                except Exception as exc:
                    logger.app_logger.error(
                        f"Task generated unhandled exception: {exc}", exc_info=True
                    )
                if shutdown_requested.is_set():
                    logger.app_logger.warning(
                        "Shutdown requested. Cancelling remaining tasks."
                    )
                    for f in futures:
                        if not f.done():
                            f.cancel()
                    break
        except KeyboardInterrupt:
            # Fallback in case the signal handler didn't catch it first.
            logger.app_logger.warning("Keyboard interrupt. Initiating shutdown.")
            shutdown_requested.set()
            for f in futures:
                f.cancel()

    # --- Cleanup ---
    # Quit every driver and delete its per-worker temp download directory.
    logger.app_logger.info("Performing final Selenium driver cleanup...")
    drivers_to_quit = list(selenium_drivers_pool)
    selenium_drivers_pool.clear()
    for driver, download_dir in drivers_to_quit:
        try:
            logger.app_logger.debug(f"Quitting driver for temp dir: {download_dir}")
            driver.quit()
            if os.path.exists(download_dir):
                try:
                    shutil.rmtree(download_dir)
                    logger.app_logger.debug(f"Cleaned up temp dir: {download_dir}")
                except OSError as e:
                    logger.app_logger.warning(
                        f"Could not remove temp dir {download_dir}: {e}"
                    )
        except Exception as e:
            logger.app_logger.error(
                f"Error during driver cleanup for {download_dir}: {e}"
            )
    selenium_temp_base = os.path.join(output_dir_base, "selenium_temp")
    if os.path.exists(selenium_temp_base):
        try:
            # Only remove the base temp dir when empty (best effort).
            if not os.listdir(selenium_temp_base):
                os.rmdir(selenium_temp_base)
                logger.app_logger.info(f"Removed empty base Selenium temp dir")
        except OSError as e:
            logger.app_logger.warning(
                f"Could not remove base Selenium temp dir {selenium_temp_base}: {e}"
            )

    if ui:
        ui.stop()

    # --- Final Summary ---
    end_time_main = time.time()
    duration_main = end_time_main - start_time_main
    final_size_str = downloader._format_size(total_downloaded_size_bytes)
    final_successful_count = len(
        logger.load_log_file(dynamic_success_log, is_success_log=True)
    )
    # NOTE(review): this call omits is_success_log=False, unlike every other
    # failure-log call site in this file — confirm load_log_file's default.
    final_failed_count = len(logger.load_log_file(dynamic_failure_log))

    summary_header = "--- Download Summary ---"
    summary_attempted = f"Attempted items (this run): {completed_items_overall + failed_items_overall}/{total_items_overall}"
    summary_completed = f"Completed items (this run): {completed_items_overall}"
    summary_failed = f"Failed items (this run):     {failed_items_overall}"
    summary_size = f"Total downloaded size (this run): {final_size_str}"
    summary_time = f"Total execution time: {duration_main:.2f} seconds"
    summary_log_success = (
        f"Total successful (in log '{dynamic_success_log}'): {final_successful_count}"
    )
    summary_log_failed = f"Total currently unsuccessful (in log '{dynamic_failure_log}'): {final_failed_count}"

    log_summary = [
        "=" * 20 + " Run Finished " + "=" * 20,
        summary_header,
        summary_attempted,
        summary_completed,
        summary_failed,
        summary_size,
        summary_time,
        summary_log_success,
        summary_log_failed,
    ]
    if shutdown_requested.is_set():
        log_summary.append("Run terminated early due to shutdown request.")
    for line in log_summary:
        logger.app_logger.info(line)

    print(f"\n{summary_header}")
    print(summary_attempted)
    print(summary_completed)
    print(summary_failed)
    print(summary_size)
    print(summary_time)
    print("-" * 40)
    print(summary_log_success)
    print(summary_log_failed)
    print("=" * 40)
    print(f"Check '{output_dir_base}' folder for downloads.")
    print(f"Check '{dynamic_success_log}' and '{dynamic_failure_log}' for details.")
    print(f"Check '{logger.APP_LOG_FILE}' for detailed logs.")
    if shutdown_requested.is_set():
        print("\nRun terminated early due to shutdown request.")

    # Exit code: 0 only when this run recorded no failures.
    sys.exit(0 if failed_items_overall == 0 else 1)


# Script entry point: run the downloader when executed directly.
if __name__ == "__main__":
    main()
