import time
import threading
import json
import os
import pickle
import schedule
from datetime import datetime
from franai.models.attendance_model import AttendanceLog
from franai.models.registered_user_model import RegisteredUser
from franai.models.staff_model import Staff

STATE_FILE = "scheduler_state.json"
USER_DATA_FILE = "user_data.json"
EMBEDDINGS_DIR = "insightface_embeddings"
INTERVAL_SECONDS = 300.0  # 5 minutes

class TimerScheduler:
    def __init__(self, interval, task):
        self._timer = None
        self.interval = interval
        self.task = task
        self.is_running = False

    def _run(self):
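        # Re-arm the timer before running the task so that a slow or failing
        # task does not break the recurring schedule.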
        self.is_running = False
        self.start()
        self.task()

    def start(self):
        if not self.is_running:
            self._timer = threading.Timer(self.interval, self._run)
            self._timer.daemon = True
            self._timer.start()
            self.is_running = True
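
    def stop(self):
        """Cancels the pending timer, if any, and stops further rescheduling."""
        if self._timer is not None:
            self._timer.cancel()
        self.is_running = False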

def get_last_sync_time():
    """Reads the last sync timestamp from the state file."""
    if not os.path.exists(STATE_FILE):
        return None
    try:
        with open(STATE_FILE, 'r') as f:
            data = json.load(f)
            return datetime.fromisoformat(data['last_sync_timestamp'])
    except (IOError, json.JSONDecodeError, KeyError, ValueError):  # ValueError: malformed timestamp
        return None

def set_last_sync_time(sync_time):
    """Writes the last sync timestamp to the state file."""
    try:
        with open(STATE_FILE, 'w') as f:
            json.dump({'last_sync_timestamp': sync_time.isoformat()}, f)
    except IOError as e:
        print(f"❌ Error writing to state file: {e}")

def user_sync_task():
    """Fetches all users from the database and updates user_data.json."""
    print("Scheduler: Waking up to run user sync task...")
    try:
        users_from_db = RegisteredUser.get_all_with_hris_info()
        if users_from_db is None:
            print("Scheduler: Could not fetch users from DB. Skipping user sync.")
            return

        new_user_data = {}
        for user in users_from_db:
            pid = str(user['pid'])
            new_user_data[pid] = {
                "name": user.get('name', 'N/A'),
                "email": user.get('email', 'N/A'),
                "team": user.get('team_name', 'N/A'),
                "status": "active" 
            }

        # Check if the user data has actually changed before writing to the file
        existing_user_data = {}
        if os.path.exists(USER_DATA_FILE):
            try:
                with open(USER_DATA_FILE, 'r') as f:
                    existing_user_data = json.load(f)
            except (IOError, json.JSONDecodeError):
                print(f"Scheduler: Could not read existing {USER_DATA_FILE}. Overwriting.")

        if new_user_data != existing_user_data:
            try:
                with open(USER_DATA_FILE, 'w') as f:
                    json.dump(new_user_data, f, indent=4)
                print(f"🔄 User Sync: Successfully updated {USER_DATA_FILE} with {len(new_user_data)} users.")
            except IOError as e:
                print(f"❌ Error writing to {USER_DATA_FILE}: {e}")
        else:
            print("Scheduler: No changes in user data. Sync skipped.")

    except Exception as e:
        print(f"❌ An error occurred in the user sync task: {e}")


def incremental_sync_task():
    """
    The task to be run by the scheduler. It performs an incremental sync
    based on the last sync time.
    """
    print("Scheduler: Waking up to run incremental sync task...")
    try:
        last_sync_time = get_last_sync_time()
        
        new_latest_timestamp = AttendanceLog.sync_from_csv_incremental(last_sync_time=last_sync_time)
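        # sync_from_csv_incremental is expected to return the newest timestamp
        # it ingested, or None when there were no new records to sync.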

        if new_latest_timestamp:
            set_last_sync_time(new_latest_timestamp)
            print(f"Scheduler: State updated. Last sync time is now {new_latest_timestamp.isoformat()}")

    except Exception as e:
        print(f"❌ An error occurred in the incremental sync task: {e}")

def update_pkl_files():
    """
    Periodically updates name and department in .pkl files with the latest info
    from the HRIS database.
    """
    print("🔄 Starting scheduled update of .pkl files...")
    
    try:
        # 1. Get all hired employees from the database
        all_hired_staff = Staff.get_all_hired_staff_for_update()
        if not all_hired_staff:
            print("⚠️ No hired staff found in the database. Aborting update.")
            return

        # Create a dictionary for quick lookups by pid
        staff_dict = {str(staff['pid']): staff for staff in all_hired_staff}
        
        # 2. Loop through all .pkl files
        if not os.path.exists(EMBEDDINGS_DIR):
            print(f"⚠️ Embeddings directory '{EMBEDDINGS_DIR}' not found. Aborting.")
            return

        embedding_files = [f for f in os.listdir(EMBEDDINGS_DIR) if f.endswith('.pkl')]
        
        updated_files = 0
        for filename in embedding_files:
            file_path = os.path.join(EMBEDDINGS_DIR, filename)
            try:
                with open(file_path, 'rb') as f:
                    data = pickle.load(f)
                
                user_id = data.get('user_id')
                if user_id is None:
                    print(f"⚠️ File '{filename}' has no user_id. Skipping.")
                    continue
                user_id = str(user_id)
                current_name = data.get('name')
                current_department = data.get('department')

                # 3. Match user_id with the list of hired employees
                if user_id in staff_dict:
                    employee = staff_dict[user_id]
                    new_name = f"{employee['first_name']} {employee['last_name']}"
                    new_department = employee.get('team_name', '') # team_name is the department

                    # 4. Check if an update is needed
                    if new_name != current_name or new_department != current_department:
                        print(f"🔄 Updating user {user_id}: Name '{current_name}' -> '{new_name}', Dept '{current_department}' -> '{new_department}'")
                        data['name'] = new_name
                        data['department'] = new_department
                        
                        # 5. Update the .pkl file
                        with open(file_path, 'wb') as f:
                            pickle.dump(data, f)
                        updated_files += 1
                else:
                    print(f"⚠️ User ID {user_id} from '{filename}' not found in the list of hired employees.")

            except Exception as e:
                print(f"❌ Error processing file {filename}: {e}")

        print(f"✅ Scheduled update finished. Updated {updated_files} files.")

    except Exception as e:
        print(f"❌ An unexpected error occurred during the scheduled update: {e}")

def combined_sync_task():
    """Runs both the user sync and attendance sync tasks."""
    user_sync_task()
    incremental_sync_task()

def run_scheduler():
    """Runs the scheduling loop; intended to be executed in a background thread."""
    # Register the combined sync task to run every INTERVAL_SECONDS (5 minutes)
    schedule.every(int(INTERVAL_SECONDS)).seconds.do(combined_sync_task)
    
    while True:
        schedule.run_pending()
        time.sleep(1)

def start_scheduler():
    """
    Initializes and starts the background scheduler by running the
    scheduling loop in a daemon thread.
    """
    # print("🚀 Starting background scheduler...")
    # Optionally run the tasks once immediately at startup:
    # combined_sync_task()
    
    # Start the recurring scheduler in a separate thread
    scheduler_thread = threading.Thread(target=run_scheduler)
    scheduler_thread.daemon = True
    scheduler_thread.start()
    # print("✅ Background scheduler thread has been started.")
