import os
import logging
from datetime import datetime, timedelta
from urllib.parse import unquote
import json
from collections import defaultdict
import smbclient
import shutil
import re
import argparse

# List of IP addresses we care about
WHITELIST_IPS = [
    "199.111.212.5",
    "175.159.122.63",
    "109.245.193.97",
    "158.195.18.232",
    "2607:fea8:4f40:4b00:e5b9:9806:6b69:233b",
    "66.254.231.49",
    "129.74.154.194", 
    "175.196.44.217",
    "2601:600:8d00:9510:1d77:b610:9358:f443",
    "74.90.222.68",
    "2a02:169:3e9:0:6ce8:e76f:faed:c830",
    "70.50.179.57",
    "2a02:842a:24:5a01:8cd6:5b22:1189:6035",
    "2408:8418:6390:7603:40b:555f:774:a05d"
]

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(__name__)

def get_ip_from_jsonl(file_path):
    """Extract IP from the first line of a JSONL file"""
    try:
        with smbclient.open_file(file_path, mode='r') as f:
            first_line = f.readline()
            data = json.loads(first_line)
            return data.get('ip')
    except Exception as e:
        log.error(f"Error reading file {file_path}: {e}")
        return None
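
# Note: the first line of each conv-log JSONL file is assumed to be a JSON object
# carrying the client address, e.g. {"ip": "199.111.212.5", ...}; only the 'ip' field is read here.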

def get_chat_session_id(file_path):
    """Extract chat_session_id based on the file location:
    - For files under conv_logs: extract from filename
    - For files under sandbox_logs: read from file content
    """
    try:
        if 'conv_logs' in file_path:
            # Extract from filename for conv_logs
            # Handle Windows UNC path format
            filename = file_path.split('\\')[-1]  # Get the last component of the path
            match = re.match(r'conv-log-([a-f0-9]+)\.json', filename)
            if match:
                return match.group(1)
        elif 'sandbox_logs' in file_path:
            # Read from file content for sandbox_logs
            with smbclient.open_file(file_path, mode='r') as f:
                data = json.loads(f.read())
                return data['sandbox_state'].get('chat_session_id')
        return None
    except Exception as e:
        log.error(f"Error getting chat_session_id from {file_path}: {e}")
        return None
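
# Note: sandbox log files are assumed to be single JSON documents shaped like
# {"sandbox_state": {"chat_session_id": "<hex id>", ...}, ...}, whereas conv logs
# carry the same id in their filename (conv-log-<hex id>.json).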

def get_sandbox_session_ids(server, share, date_str):
    """Get all chat_session_ids from sandbox logs for a given date"""
    sandbox_folder = f"\\\\{server}\\{share}\\{date_str}\\sandbox_logs"
    session_ids = set()
    
    if not smbclient.path.exists(sandbox_folder):
        return session_ids
    
    try:
        for file_info in smbclient.scandir(sandbox_folder):
            if file_info.name.endswith('.json'):
                file_path = f"{sandbox_folder}\\{file_info.name}"
                session_id = get_chat_session_id(file_path)
                if session_id:
                    session_ids.add(session_id)
    except Exception as e:
        log.error(f"Error scanning sandbox folder {sandbox_folder}: {e}")
    
    return session_ids

def check_vote_conditions(file_path):
    """Check if the last line of the file has type:vote and feedback dict with 6 keys"""
    try:
        with smbclient.open_file(file_path, mode='r') as f:
            # Read all lines and get the last non-empty line
            lines = [line.strip() for line in f if line.strip()]
            if not lines:
                return False
            last_line = lines[-1]
            try:
                data = json.loads(last_line)
                feedback = data.get('feedback')
                return (data.get('type') == 'vote' and 
                       isinstance(feedback, dict) and 
                       len(feedback) == 6)
            except json.JSONDecodeError:
                return False
    except Exception as e:
        log.error(f"Error checking vote conditions in file {file_path}: {e}")
        return False
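
# Note: a completed conversation is assumed to end with a vote record such as
# {"type": "vote", "feedback": {...6 keys...}, ...}; anything else counts as not voted.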

def get_file_data(file_path):
    """Read file and return IP and vote condition status"""
    try:
        with smbclient.open_file(file_path, mode='r') as f:
            lines = [line.strip() for line in f if line.strip()]
            if not lines:
                return None, False
            
            # Get IP from first line
            try:
                first_line_data = json.loads(lines[0])
                ip = first_line_data.get('ip')
                # Early return if IP is not in whitelist
                if ip not in WHITELIST_IPS:
                    return None, False
            except json.JSONDecodeError:
                ip = None
            
            # Check vote conditions from last line
            try:
                last_line_data = json.loads(lines[-1])
                feedback = last_line_data.get('feedback')
                vote_conditions_met = (last_line_data.get('type') == 'vote' and 
                                    isinstance(feedback, dict) and 
                                    len(feedback) == 6)
            except json.JSONDecodeError:
                vote_conditions_met = False
                
            return ip, vote_conditions_met
    except Exception as e:
        log.error(f"Error reading file {file_path}: {e}")
        return None, False

def count_files_per_ip(smb_url, start_date_str="2025_02_18"):
    """Count files per IP address from the given start date"""
    # Parse the URL, expected in the form smb://username:password@server/share
    # (the password may be percent-encoded)
    url = smb_url[6:]  # strip the 'smb://' prefix
    creds_server, share = url.split('/', 1)
    creds, server = creds_server.rsplit('@', 1)
    username, password = creds.split(':', 1)
    password = unquote(password)
    
    # Register the SMB session
    smbclient.register_session(server, username=username, password=password, port=8080)
    
    # Convert start date string to datetime
    start_date = datetime.strptime(start_date_str, "%Y_%m_%d")
    ip_counts = defaultdict(int)
    
    try:
        # Iterate day by day from the start date through today
        current_date = start_date
        today = datetime.now()
        
        while current_date <= today:
            date_str = current_date.strftime("%Y_%m_%d")
            folder_path = f"\\\\{server}\\{share}\\{date_str}\\conv_logs\\battle_anony"
            
            try:
                # List all JSON files in the battle_anony folder
                if smbclient.path.exists(folder_path):
                    for file_info in smbclient.scandir(folder_path, search_pattern="conv-log-*.json"):
                        file_path = f"{folder_path}\\{file_info.name}"
                        ip, vote_conditions_met = get_file_data(file_path)
                        if vote_conditions_met and ip:
                            ip_counts[ip] += 1
            except Exception as e:
                log.error(f"Error processing folder {date_str}: {e}")
            
            # Move to next day
            current_date += timedelta(days=1)
                
    except Exception as e:
        log.error(f"Error accessing SMB share: {e}")
    
    return dict(ip_counts)
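
# Example (server, share, and credentials are hypothetical):
#   count_files_per_ip("smb://alice:s3cret@fileserver/chatlogs", "2025_02_18")
# returns a dict like {"199.111.212.5": <count>, ...}, counting only whitelisted IPs
# whose conversations ended in a completed vote.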

def download_files_by_ip(smb_url, start_date_str="2025_02_18", check_sandbox=True):
    """Download files and organize them by IP address
    
    Args:
        smb_url (str): The SMB URL to connect to
        start_date_str (str): The start date in YYYY_MM_DD format
        check_sandbox (bool): Whether to check for matching sandbox logs
    """
    # Parse the URL, expected in the form smb://username:password@server/share
    # (the password may be percent-encoded)
    url = smb_url[6:]  # strip the 'smb://' prefix
    creds_server, share = url.split('/', 1)
    creds, server = creds_server.rsplit('@', 1)
    username, password = creds.split(':', 1)
    password = unquote(password)
    
    # Register the SMB session
    smbclient.register_session(server, username=username, password=password)
    
    # Create base data directory
    data_dir = os.path.join(os.getcwd(), "data")
    os.makedirs(data_dir, exist_ok=True)
    
    # Convert start date string to datetime
    start_date = datetime.strptime(start_date_str, "%Y_%m_%d")
    
    try:
        # Iterate day by day from the start date through today
        current_date = start_date
        today = datetime.now()
        
        while current_date <= today:
            date_str = current_date.strftime("%Y_%m_%d")
            folder_path = f"\\\\{server}\\{share}\\{date_str}\\conv_logs\\battle_anony"
            
            # Get all sandbox session IDs for this date
            sandbox_session_ids = get_sandbox_session_ids(server, share, date_str) if check_sandbox else set()
            try:
                # List all JSON files in the battle_anony folder
                if smbclient.path.exists(folder_path):
                    for file_info in smbclient.scandir(folder_path):
                        # Skip macOS metadata files
                        if file_info.name.startswith('._'):
                            continue
                        if file_info.name.endswith('.json'):
                            file_path = f"{folder_path}\\{file_info.name}"
                            ip = get_ip_from_jsonl(file_path)
                            if ip:
                                # Create directory structure for this IP
                                ip_dir = os.path.join(data_dir, ip)
                                valid_dir = os.path.join(ip_dir, "valid")
                                invalid_dir = os.path.join(ip_dir, "invalid")
                                os.makedirs(valid_dir, exist_ok=True)
                                os.makedirs(invalid_dir, exist_ok=True)
                                
                                # Check if chat_session_id exists in sandbox logs
                                if check_sandbox:
                                    chat_session_id = get_chat_session_id(file_path)
                                    has_sandbox = chat_session_id in sandbox_session_ids if chat_session_id else False
                                    target_dir = valid_dir if has_sandbox else invalid_dir
                                else:
                                    # When sandbox checking is disabled, put everything in valid
                                    target_dir = valid_dir
                                
                                # Download the file
                                local_file_path = os.path.join(target_dir, file_info.name)
                                try:
                                    with smbclient.open_file(file_path, mode='rb') as remote_file:
                                        with open(local_file_path, 'wb') as local_file:
                                            shutil.copyfileobj(remote_file, local_file)
                                    log.info(f"Downloaded {file_info.name} to {target_dir}")
                                except Exception as e:
                                    log.error(f"Error downloading file {file_info.name}: {e}")
            
            except Exception as e:
                log.error(f"Error processing folder {date_str}: {e}")
            
            # Move to next day
            current_date += timedelta(days=1)
                
    except Exception as e:
        log.error(f"Error accessing SMB share: {e}")

def main():
    smb_url = os.getenv("SMB_URL")
    if not smb_url:
        raise SystemExit("SMB_URL environment variable is not set (expected smb://username:password@server/share)")
    
    # Parse optional command-line flags
    parser = argparse.ArgumentParser(description='Download and organize conversation files by IP')
    parser.add_argument('--sandbox-check', action='store_true', help='Check for matching sandbox logs')
    parser.add_argument('--download', action='store_true', help='Enable file download')
    args = parser.parse_args()
    
    # Download files if enabled
    if args.download:
        print("\nDownloading files and organizing by IP address...")
        download_files_by_ip(smb_url, check_sandbox=args.sandbox_check)
    
    # Count and display statistics
    ip_counts = count_files_per_ip(smb_url)
    print("\nFile counts per IP address:")
    for ip, count in sorted(ip_counts.items(), key=lambda x: x[1], reverse=True):
        print(f"IP: {ip:<15} Count: {count}")

if __name__ == "__main__":
    main()