# logger.py (Original Format Data, No Timestamps in Summary Logs)
import os
import threading
import sys
import logging

# --- Constants ---
APP_LOG_FILE = "rentry_downloader.log"  # Main application log

# --- Global Logger Instance ---
app_logger = logging.getLogger("RentryDownloader")
app_logger.propagate = False

# Re-entrant lock guarding writes to the SUCCESS/FAILURE summary files; an RLock
# lets log_failure() hold it while calling append_summary_log().
_summary_log_lock = threading.RLock()

# --- Debugging Flag ---
_debug_enabled = False


# --- Setup Logging ---
def setup_logging(config):
    """Sets up file logging (overwriting main log) and optional console debug."""
    global _debug_enabled, app_logger
    if app_logger.hasHandlers():
        app_logger.handlers.clear()
    log_level_str = config.get("Misc", "log_level", fallback="INFO").upper()
    log_level = getattr(logging, log_level_str, logging.INFO)
    app_logger.setLevel(log_level)

    try:  # Main app log handler (with timestamp, overwrites)
        file_handler = logging.FileHandler(APP_LOG_FILE, encoding="utf-8", mode="w")
        file_handler.setLevel(log_level)
        formatter = logging.Formatter(
            "%(asctime)s - %(levelname)-8s - %(threadName)-15s - %(message)s"
        )
        file_handler.setFormatter(formatter)
        app_logger.addHandler(file_handler)
    except Exception as e:
        print(
            f"CRITICAL: Failed to set up file logging for {APP_LOG_FILE}: {e}",
            file=sys.stderr,
        )

    _debug_enabled = config.getboolean("Misc", "debug_mode", fallback=False)
    if _debug_enabled:  # Console debug handler (no timestamp)
        if not any(
            isinstance(h, logging.StreamHandler) and h.stream == sys.stderr
            for h in app_logger.handlers
        ):
            stream_handler = logging.StreamHandler(sys.stderr)
            stream_handler.setLevel(logging.DEBUG)
            debug_formatter = logging.Formatter(
                "%(levelname)-8s - %(threadName)-15s - %(message)s"
            )
            stream_handler.setFormatter(debug_formatter)
            app_logger.addHandler(stream_handler)
            app_logger.info("Debug mode enabled. Verbose logging to console and file.")
    else:  # Initial message logic if not debugging to console
        if len(app_logger.handlers) == 1 and isinstance(
            app_logger.handlers[0], logging.FileHandler
        ):
            print(
                f"Logging initialized. Level: {log_level_str}. Details in {APP_LOG_FILE}"
            )
            app_logger.info(
                f"Logging initialized. Level: {log_level_str}. Details in {APP_LOG_FILE}"
            )
        elif not app_logger.hasHandlers():
            print(
                f"Logging initialized. Level: {log_level_str}. "
                f"File logging to {APP_LOG_FILE} could not be set up (see error above).",
                file=sys.stderr,
            )

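# A sketch of the [Misc] section that setup_logging() expects in the app's INI
# config (an assumed example; the key names come from the get()/getboolean()
# calls above):
#
#   [Misc]
#   log_level = INFO
#   debug_mode = false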

# --- Success/Failure Summary Log Functions ---


# <<< NEW FUNCTION: Writes summary logs WITHOUT timestamp >>>
def append_summary_log(filepath, entry):
    """Appends a single entry to a summary log file WITHOUT a timestamp."""
    with _summary_log_lock:
        try:
            log_dir = os.path.dirname(filepath)
            if log_dir:
                # exist_ok avoids a race if the directory is created concurrently
                os.makedirs(log_dir, exist_ok=True)
            # Open in append mode, write entry directly + newline
            with open(filepath, "a", encoding="utf-8") as f:
                f.write(f"{entry}\n")
        except Exception as e:
            app_logger.error(f"Error appending to summary log file '{filepath}': {e}")


def load_log_file(filepath, is_success_log=False):
    """
    Loads lines from a summary log file into a set.
    Assumes log entries have NO timestamps.
    Success logs yield just the filename part of each entry; failure logs yield
    the full "Name: ... | URL: ... | Reason: ..." line.
    """
    entries = set()
    if not os.path.exists(filepath):
        return entries
    with _summary_log_lock:
        try:
            with open(filepath, "r", encoding="utf-8") as f:
                for line in f:
                    entry = line.strip()
                    if not entry:
                        continue
                    # <<< MODIFIED: No timestamp stripping >>>
                    # Process based on type
                    if is_success_log:
                        # Entry format: "Filename | Size: ..."
                        # Extract just the filename part
                        processed_entry = entry.split(" | Size:")[0].strip()
                    else:
                        # Failure log entry format: "Name: ... | URL: ... | Reason: ..."
                        # Return the full line for failure processing
                        processed_entry = entry

                    if processed_entry:
                        entries.add(processed_entry)
        except Exception as e:
            app_logger.warning(f"Could not read summary log file '{filepath}': {e}")
    return entries

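# Worked example (hypothetical data): a success-log line
#   "report_01.pdf | Size: 1.2 MB"
# makes load_log_file(path, is_success_log=True) return {"report_01.pdf"}, while
# a failure log yields its full "Name: ... | URL: ... | Reason: ..." lines.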

def remove_from_log_file(filepath, identifier, is_success_log=False):
    """
    Removes entries from a summary log file based on an identifier.
    Assumes logs have NO timestamps.
    Success log: identifier is filename. Failure log: identifier is "Name: ... | URL: ..."
    """
    lines_to_keep = []
    entry_found_and_removed = False
    if not os.path.exists(filepath):
        return False

    with _summary_log_lock:
        try:
            with open(filepath, "r", encoding="utf-8") as f:
                all_lines = f.readlines()

            for line in all_lines:
                current_entry_full = line.strip()
                if not current_entry_full:
                    continue

                # <<< MODIFIED: No timestamp stripping >>>
                data_part = current_entry_full

                # Determine the part to compare
                compare_part = ""
                if is_success_log:
                    # Identifier is filename, data_part is "filename | Size: ..."
                    compare_part = data_part.split(" | Size:")[0].strip()
                else:  # Failure log
                    # Identifier is "Name: ... | URL: ..."
                    # Data part is "Name: ... | URL: ... | Reason: ..."
                    compare_part = data_part.split(" | Reason:")[0].strip()

                # Check if this line should be removed
                if compare_part == identifier:
                    entry_found_and_removed = True
                    # app_logger.debug(f"Removing line matching '{identifier}' from {os.path.basename(filepath)}")
                else:
                    lines_to_keep.append(current_entry_full)  # Keep original line

            if entry_found_and_removed:
                with open(filepath, "w", encoding="utf-8") as f:
                    for line_to_keep in lines_to_keep:
                        f.write(f"{line_to_keep}\n")
                return True
        except Exception as e:
            app_logger.error(
                f"Error processing summary log file '{filepath}' for removal: {e}"
            )
            return False
    return entry_found_and_removed


# <<< MODIFIED: Uses original format string but calls append_summary_log >>>
def log_success(dynamic_success_log_path, item_name_or_filename, final_size_str):
    """Logs successful download with format: name | Size: size (NO timestamp)."""
    entry = f"{item_name_or_filename} | Size: {final_size_str}"
    # Calls the new function which DOES NOT add timestamp
    append_summary_log(dynamic_success_log_path, entry)


# <<< MODIFIED: Uses original format string but calls append_summary_log >>>
def log_failure(dynamic_failure_log_path, item_info, reason="Unknown error"):
    """Logs failed download: Name: name | URL: url | Reason: reason (NO timestamp)."""
    entry_data = f"Name: {item_info.get('name', 'N/A')} | URL: {item_info.get('url', 'N/A')} | Reason: {reason}"
    # Identifier based on original name and URL
    check_identifier = (
        f"Name: {item_info.get('name', 'N/A')} | URL: {item_info.get('url', 'N/A')}"
    )

    # Load the failure log and compare only the identifier part (Name | URL).
    # The lock is held across the check AND the append so two threads cannot
    # both miss the identifier and then write duplicate entries.
    with _summary_log_lock:
        current_failures_identifiers = set()
        if os.path.exists(dynamic_failure_log_path):
            try:
                with open(dynamic_failure_log_path, "r", encoding="utf-8") as f:
                    for line in f:
                        entry = line.strip()
                        if not entry:
                            continue
                        # No timestamp stripping needed
                        identifier_part = entry.split(" | Reason:")[0].strip()
                        if identifier_part:
                            current_failures_identifiers.add(identifier_part)
            except Exception as e:
                app_logger.warning(
                    f"Could not read failure log '{dynamic_failure_log_path}' during check: {e}"
                )

        if check_identifier not in current_failures_identifiers:
            # append_summary_log re-acquires the re-entrant lock; no timestamp is added
            append_summary_log(dynamic_failure_log_path, entry_data)

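# ---------------------------------------------------------------------------
# Minimal usage sketch (an assumed example, not part of the downloader itself).
# The [Misc] keys match those read by setup_logging(); the summary log paths,
# file name, URL and sizes below are hypothetical placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import configparser

    demo_config = configparser.ConfigParser()
    demo_config.read_dict({"Misc": {"log_level": "DEBUG", "debug_mode": "true"}})
    setup_logging(demo_config)

    success_log = "logs/success_summary.log"  # hypothetical path
    failure_log = "logs/failure_summary.log"  # hypothetical path

    # Record one success and one failure (entries are written without timestamps)
    log_success(success_log, "example_file.bin", "1.23 MB")
    log_failure(
        failure_log,
        {"name": "example_file.bin", "url": "https://example.com/file"},
        reason="HTTP 404",
    )

    # Reload the summaries: filenames for successes, full lines for failures
    print(load_log_file(success_log, is_success_log=True))
    print(load_log_file(failure_log, is_success_log=False))

    # Remove the failure entry again using its "Name: ... | URL: ..." identifier
    remove_from_log_file(
        failure_log,
        "Name: example_file.bin | URL: https://example.com/file",
        is_success_log=False,
    )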