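"""Count and download conversation logs per annotator.

For each day from the start date onward, the script reads battle_anony
conversation logs via RemoteLogReader, matches each conversation to an
annotator through the IP and username whitelists below, and prints how many
distinct valid voted prompts each annotator produced. With --download it also
saves each matched conversation under data/<annotator name>/; with
--sandbox-check the saved files are split into valid/ and invalid/ depending
on whether a matching sandbox log exists for the conversation.
"""
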
import argparse
import json
import logging
import os
import re
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, Optional, Set, Tuple

from log_reader import RemoteLogReader


WHITELIST_IPS_DICT = {
    "Chen Gong": ["128.143.67.19"],
    "Juyong Jiang": ["175.159.122.63"],
    "Kenneth Hamilton": ["109.245.193.97"],
    "Marek Suppa": ["158.195.18.232"],
    "Max Tian": ["2607:fea8:4f40:4b00:e5b9:9806:6b69:233b", "2607:fea8:4f40:4b00:bcef:571:6124:f01", "2607:fea8:7c9d:3800:d9c0:7295:3e2e:6287", "2607:fea8:7c9d:3800:fd51:2c91:c9e2:3c8"],
    "Mengzhao Jia": ["66.254.231.49", "160.32.74.89"],
    "Noah Ziems": ["2601:245:c500:92c0:633c:c0d2:dcc1:1f48", "2601:245:c500:92c0:961e:9ac7:e02:c266", "2601:245:c500:92c0:ae74:d1d5:ca3b:da90"],
    "Sabina A": ["175.196.44.217", "58.235.174.122", "14.52.175.55"],
    "Wenhao Yu": ["2601:600:8d00:9510:1d77:b610:9358:f443", "2601:600:8d00:9510:513f:6c4e:5960:fdc7", "174.164.6.99"],
    "Vaisakhi Mishra": ["74.90.222.68"],
    "Kumar Shridhar": ["129.132.145.250"],
    "Viktor Gal": ["2a02:169:3e9:0:6ce8:e76f:faed:c830"],
    "Guangyu Song": ["70.50.179.57", "209.226.139.83"],
    "Bhupesh Bishnoi": ["37.65.177.22", "195.220.58.237", "194.57.114.147", "195.220.58.234"],
    "Zheng Liu": ["128.143.71.67"],
    "Ming Xu": ["2601:600:8d00:9510:185b:955d:275b:7685", "2601:600:8d00:9510:5150:468c:ab7d:518d"],
    "Ayush Sunil Munot": ["10.145.76.56"],
    "Saiteja Utpala": ["192.168.31.185"]
}


WHITELIST_USERNAMES_DICT = {
    "Chen Gong": ["Chen Gong"],
    "Juyong Jiang": ["juyongjiang"],
    "Kenneth Hamilton": [],
    "Marek Suppa": [],
    "Max Tian": [],
    "Mengzhao Jia": ["Mengzhao Jia"],
    "Noah Ziems": [],
    "Sabina A": [],
    "Wenhao Yu": [],
    "Vaisakhi Mishra": [],
    "Kumar Shridhar": [],
    "Viktor Gal": [],
    "Guangyu Song": [],
    "Bhupesh Bishnoi": ["BB"],
    "Zheng Liu": ["ZL"],
    "Ming Xu": [],
    "Ayush Sunil Munot": [],
    "Terry Yue Zhuo": ["test"],
    "Saiteja Utpala": ["saitejautpala"]
}


WHITELIST_IPS = [ip for ips in WHITELIST_IPS_DICT.values() for ip in ips]
WHITELIST_USERNAMES = [username for usernames in WHITELIST_USERNAMES_DICT.values() for username in usernames]


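# Preset example prompts (presumably the ones offered in the arena UI).
# Conversations whose first user prompt matches one of these are excluded
# from the per-annotator counts in count_files_per_annotator below.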
EXAMPLES = [
    ["使用SVG绘制春节主题的动态图案,包括:1)一个红色的灯笼,带有金色的流苏 2)一个金色的福字,使用书法字体 3)背景添加一些烟花效果 4)在灯笼和福字周围添加一些祥云图案。确保图案布局美观,颜色搭配符合春节传统风格。"],
    ["SVGを使用して日本の伝統的な和柄パターンを描画してください。1)波紋(さざなみ)模様 2)市松模様 3)麻の葉模様 4)雷文(らいもん)模様を含めてください。色は伝統的な日本の色(藍色、朱色、金色など)を使用し、レイアウトはバランスよく配置してください。"],
    ["Write HTML with P5.js that simulates 25 particles in a vacuum space of a cylindrical container, bouncing within its boundaries. Use different colors for each ball and ensure they leave a trail showing their movement. Add a slow rotation of the container to give better view of what's going on in the scene. Make sure to create proper collision detection and physic rules to ensure particles remain in the container. Add an external spherical container. Add a slow zoom in and zoom out effect to the whole scene."],
    ["Write a Python script to scrape NVIDIA's stock price for the past month using the yfinance library. Clean the data and create an interactive visualization using Matplotlib. Include: 1) A candlestick chart showing daily price movements 2) A line chart with 7-day and 30-day moving averages. Add hover tooltips showing exact values and date. Make the layout professional with proper titles and axis labels."],
    ["Write a Python script that uses the Gradio library to create a functional calculator. The calculator should support basic arithmetic operations: addition, subtraction, multiplication, and division. It should have two input fields for numbers and a dropdown menu to select the operation."],
    ["Write a Todo list app using React.js. The app should allow users to add, delete, and mark tasks as completed. Include features like filtering tasks by status (completed, active), sorting tasks by priority, and displaying the total number of tasks."],
    ["Write a Python script using the Streamlit library to create a web application for uploading and displaying files. The app should allow users to upload files of type .csv or .txt. If a .csv file is uploaded, display its contents as a table using Streamlit's st.dataframe() method. If a .txt file is uploaded, display its content as plain text."],
    ["Write a Python function to solve the Trapping Rain Water problem. The function should take a list of non-negative integers representing the height of bars in a histogram and return the total amount of water trapped between the bars after raining. Use an efficient algorithm with a time complexity of O(n)."],
    ["Create a simple Pygame script for a game where the player controls a bouncing ball that changes direction when it collides with the edges of the window. Add functionality for the player to control a paddle using arrow keys, aiming to keep the ball from touching the bottom of the screen. Include basic collision detection and a scoring system that increases as the ball bounces off the paddle. You need to add clickable buttons to start the game, and reset the game."],
    ["Create a financial management Dashboard using Vue.js, focusing on local data handling without APIs. Include features like a clean dashboard for tracking income and expenses, dynamic charts for visualizing finances, and a budget planner. Implement functionalities for adding, editing, and deleting transactions, as well as filtering by date or category. Ensure responsive design and smooth user interaction for an intuitive experience."],
    ["Create a Mermaid diagram to visualize a flowchart of a user login process. Include the following steps: User enters login credentials; Credentials are validated; If valid, the user is directed to the dashboard; If invalid, an error message is shown, and the user can retry or reset the password."],
    ["Write a Python function to calculate the Fibonacci sequence up to n numbers. Then write test cases to verify the function works correctly for edge cases like negative numbers, zero, and large inputs."],
    ["Build an HTML page for a Kanban board with three columns with Vue.js: To Do, In Progress, and Done. Each column should allow adding, moving, and deleting tasks. Implement drag-and-drop functionality using Vue Draggable and persist the state using Vuex."],
    ["Develop a Streamlit app that takes a CSV file as input and provides: 1) Basic statistics about the data 2) Interactive visualizations using Plotly 3) A data cleaning interface with options to handle missing values 4) An option to download the cleaned data."],
    ["Write an HTML page with embedded JavaScript that creates an interactive periodic table. Each element should display its properties on hover and allow filtering by category (metals, non-metals, etc.). Include a search bar to find elements by name or symbol."],
    ["Here's a Python function that sorts a list of dictionaries by a specified key:\n\n```python\ndef sort_dicts(data, key):\n return sorted(data, key=lambda x: x[key])\n```\n\nWrite test cases to verify the function works correctly for edge cases like empty lists, missing keys, and different data types. If you use unittest, please use `unittest.main(argv=['first-arg-is-ignored'], exit=False)` to run the tests."],
    ["Create a React component for a fitness tracker that shows: 1) Daily step count 2) Calories burned 3) Distance walked 4) A progress bar for daily goals."],
    ["Build a Vue.js dashboard for monitoring server health. Include: 1) Real-time CPU and memory usage graphs 2) Disk space visualization 3) Network activity monitor 4) Alerts for critical thresholds."],
    ["Write a C program that calculates and prints the first 100 prime numbers in a formatted table with 10 numbers per row. Include a function to check if a number is prime and use it in your solution."],
    ["Write a C++ program that implements a simple calculator using object-oriented programming. Create a Calculator class with methods for addition, subtraction, multiplication, and division. Include error handling for division by zero."],
    ["Write a Rust program that generates and prints a Pascal's Triangle with 10 rows. Format the output to center-align the numbers in each row."],
    ["Write a Java program that simulates a simple bank account system. Create a BankAccount class with methods for deposit, withdrawal, and balance inquiry. Include error handling for insufficient funds and demonstrate its usage with a few transactions."],
    ["Write a Go program that calculates and prints the Fibonacci sequence up to the 50th number. Format the output in a table with 5 numbers per row and include the index of each Fibonacci number."],
    ["Write a C program that calculates and prints a histogram of letter frequencies from a predefined string. Use ASCII art to display the histogram vertically."],
    ["Write a C++ program that implements a simple stack data structure with push, pop, and peek operations. Demonstrate its usage by reversing a predefined string using the stack."],
    ["Write a Rust program that calculates and prints the first 20 happy numbers. Include a function to check if a number is happy and use it in your solution."],
    ["Write a Java program that implements a simple binary search algorithm. Create a sorted array of integers and demonstrate searching for different values, including cases where the value is found and not found."],
    ["Write a Go program that generates and prints a multiplication table from 1 to 12. Format the output in a neat grid with proper alignment."],
]

EXAMPLES = [e[0] for e in EXAMPLES]


logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(__name__)


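# Inferred shape of a conv-log JSONL record (based on the fields the parsers
# below rely on; not a formal schema):
#   first line: {"ip": "...", "messages": [[<role>, "<prompt text>"], ...], ...}
#   vote line : {"type": "vote", "username": "...", "feedback": {<six rated fields>}, ...}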
def get_ip_from_jsonl(content: str) -> Optional[str]:
    """Extract IP from the first line of a JSONL content"""
    try:
        first_line = content.split('\n')[0]
        data = json.loads(first_line)
        return data.get('ip')
    except Exception as e:
        log.error(f"Error extracting IP from content: {e}")
        return None


def get_username_from_jsonl(content: str) -> Optional[str]:
    """Extract username from the last line of a JSONL content if it's a vote"""
    try:
        lines = [line.strip() for line in content.split('\n') if line.strip()]
        if not lines:
            return None

        last_line = json.loads(lines[-1])
        if last_line.get('type') == 'vote':
            return last_line.get('username')
        return None
    except Exception as e:
        log.error(f"Error extracting username from content: {e}")
        return None


def get_chat_session_id(file_name: str, content: Optional[str] = None) -> Optional[str]:
    """Extract chat_session_id based on the file location:
    - For files under conv_logs: extract from filename
    - For files under sandbox_logs: read from file content
    """
    try:
        if 'conv_logs' in file_name:
            # Conversation logs are named conv-log-<chat_session_id>.json
            match = re.match(r'conv-log-([a-f0-9]+)\.json', file_name)
            if match:
                return match.group(1)
        elif 'sandbox_logs' in file_name and content:
            # Sandbox logs carry the session id inside their JSON payload
            data = json.loads(content)
            return data['sandbox_state'].get('chat_session_id')
        return None
    except Exception as e:
        log.error(f"Error getting chat_session_id from {file_name}: {e}")
        return None


def get_sandbox_session_ids(reader: 'RemoteLogReader', date_str: str) -> Set[str]:
    """Get all chat_session_ids from sandbox logs for a given date"""
    session_ids = set()
    try:
        sandbox_logs = reader.get_sandbox_logs(date_str)
        # Name the loop variable sandbox_log to avoid shadowing the module-level logger `log`
        for sandbox_log in sandbox_logs:
            if isinstance(sandbox_log, dict):
                session_id = sandbox_log.get('sandbox_state', {}).get('chat_session_id')
                if session_id:
                    session_ids.add(session_id)
    except Exception as e:
        log.error(f"Error getting sandbox session IDs for date {date_str}: {e}")

    return session_ids


def get_file_data(content: str) -> Tuple[Optional[str], Optional[str], bool, Optional[str]]:
    """Read file content and return IP, username, vote condition status, and the user prompt"""
    user_prompt = None
    try:
        lines = [line.strip() for line in content.split('\n') if line.strip()]
        if not lines:
            return None, None, False, None

        # Extract the IP and the user prompt from the first line
        ip = None
        try:
            first_line_data = json.loads(lines[0])
            ip = first_line_data.get('ip')
            user_prompt = first_line_data["messages"][0][1]
        except (json.JSONDecodeError, KeyError, IndexError, TypeError):
            pass

        ip_in_whitelist = ip in WHITELIST_IPS

        # Locate the vote line, if any
        username = None
        vote_conditions_met = False
        vote_line_index = -1

        for i, line in enumerate(lines):
            try:
                line_data = json.loads(line)
                if line_data.get('type') == 'vote':
                    vote_line_index = i
                    break
            except json.JSONDecodeError:
                continue

        if vote_line_index >= 0:
            try:
                vote_line_data = json.loads(lines[vote_line_index])

                if 'username' in vote_line_data:
                    username = vote_line_data.get('username')

                feedback = vote_line_data.get('feedback')

                # A vote is valid only if all six feedback fields are filled in and the
                # conversation has at least four log lines up to and including the vote
                relevant_lines = lines[:vote_line_index + 1]
                vote_conditions_met = (
                    isinstance(feedback, dict) and
                    len([1 for v in feedback.values() if v]) == 6 and
                    len(relevant_lines) >= 4
                )
            except (json.JSONDecodeError, TypeError):
                pass

        username_in_whitelist = username in WHITELIST_USERNAMES if username else False

        # Ignore conversations whose IP and username are both outside the whitelists
        if not (ip_in_whitelist or username_in_whitelist):
            return ip, username, False, user_prompt

        return ip, username, vote_conditions_met, user_prompt
    except Exception as e:
        log.error(f"Error processing file content: {e}")
        return None, None, False, user_prompt


def get_annotator_name(ip: Optional[str], username: Optional[str]) -> Optional[str]:
    """Get annotator name from IP or username"""
    # Try to match by IP first
    if ip:
        for name, ips in WHITELIST_IPS_DICT.items():
            if ip in ips:
                return name

    # Fall back to matching by username
    if username:
        for name, usernames in WHITELIST_USERNAMES_DICT.items():
            if username in usernames:
                return name

    return None


def count_files_per_annotator(reader: 'RemoteLogReader', start_date_str: str = "2025_02_18") -> Dict[str, int]:
    """Count distinct valid voted prompts per annotator from the given start date, matching by IP and username"""
    start_date = datetime.strptime(start_date_str, "%Y_%m_%d")
    name_prompts = defaultdict(set)

    try:
        # Walk through each day from the start date to today
        current_date = start_date
        today = datetime.now()

        while current_date <= today:
            date_str = current_date.strftime("%Y_%m_%d")

            try:
                # Fetch all battle_anony conversation logs for the day
                conv_logs = reader.get_conv_logs(date_str)
                battle_anony_logs = conv_logs.get('battle_anony', {})

                for conv_id, messages in battle_anony_logs.items():
                    if messages:
                        # Rebuild the JSONL content from the message list
                        content = '\n'.join(json.dumps(msg) for msg in messages)
                        ip, username, vote_conditions_met, user_prompt = get_file_data(content)

                        if vote_conditions_met:
                            # Count each distinct (case-insensitive) non-example prompt once per annotator
                            annotator_name = get_annotator_name(ip, username)
                            if annotator_name and user_prompt and user_prompt not in EXAMPLES:
                                name_prompts[annotator_name].add(user_prompt.lower())

            except Exception as e:
                log.error(f"Error processing logs for date {date_str}: {e}")

            current_date += timedelta(days=1)

    except Exception as e:
        log.error(f"Error accessing logs: {e}")

    return {name: len(prompts) for name, prompts in name_prompts.items()}


def download_files_by_name(reader: 'RemoteLogReader', start_date_str: str = "2025_02_18", check_sandbox: bool = True) -> None:
    """Download files and organize them by annotator name

    Args:
        reader: RemoteLogReader instance
        start_date_str: The start date in YYYY_MM_DD format
        check_sandbox: Whether to check for matching sandbox logs
    """
    data_dir = os.path.join(os.getcwd(), "data")
    os.makedirs(data_dir, exist_ok=True)

    start_date = datetime.strptime(start_date_str, "%Y_%m_%d")

    try:
        current_date = start_date
        today = datetime.now()

        while current_date <= today:
            date_str = current_date.strftime("%Y_%m_%d")

            # Collect the day's sandbox session ids so conversations can be validated against them
            sandbox_session_ids = get_sandbox_session_ids(reader, date_str) if check_sandbox else set()

            try:
                conv_logs = reader.get_conv_logs(date_str)
                battle_anony_logs = conv_logs.get('battle_anony', {})

                for conv_id, messages in battle_anony_logs.items():
                    if not messages:
                        continue

                    # Rebuild the JSONL content and identify the annotator
                    content = '\n'.join(json.dumps(msg) for msg in messages)
                    ip = get_ip_from_jsonl(content)
                    username = get_username_from_jsonl(content)

                    annotator_name = get_annotator_name(ip, username)

                    if annotator_name:
                        # Create valid/ and invalid/ folders under the annotator's directory
                        name_dir = os.path.join(data_dir, annotator_name)
                        valid_dir = os.path.join(name_dir, "valid")
                        invalid_dir = os.path.join(name_dir, "invalid")
                        os.makedirs(valid_dir, exist_ok=True)
                        os.makedirs(invalid_dir, exist_ok=True)

                        # A conversation is valid if a matching sandbox log exists (when checking is enabled)
                        if check_sandbox:
                            has_sandbox = conv_id in sandbox_session_ids
                            target_dir = valid_dir if has_sandbox else invalid_dir
                        else:
                            target_dir = valid_dir

                        file_name = f"conv-log-{conv_id}.json"
                        local_file_path = os.path.join(target_dir, file_name)
                        try:
                            with open(local_file_path, 'w') as f:
                                f.write(content)
                            log.info(f"Saved {file_name} to {target_dir}")
                        except Exception as e:
                            log.error(f"Error saving file {file_name}: {e}")

            except Exception as e:
                log.error(f"Error processing logs for date {date_str}: {e}")

            current_date += timedelta(days=1)

    except Exception as e:
        log.error(f"Error accessing logs: {e}")


def main():
    reader = RemoteLogReader()

    parser = argparse.ArgumentParser(description='Download and organize conversation files by annotator name')
    parser.add_argument('--sandbox-check', action='store_true', help='Check for matching sandbox logs')
    parser.add_argument('--download', action='store_true', help='Enable file download')
    args = parser.parse_args()

    if args.download:
        print("\nDownloading files and organizing by annotator name...")
        download_files_by_name(reader, check_sandbox=args.sandbox_check)

    name_counts = count_files_per_annotator(reader)
    print("\nFile counts per annotator:")
    for name, count in sorted(name_counts.items(), key=lambda x: x[1], reverse=True):
        print(f"Name: {name:<20} Count: {count}")


if __name__ == "__main__":
    main()