Improve structure; move files into their relevant folders
This commit is contained in:
183
scripts/osrs/autoclicker/click_server.py
Executable file
183
scripts/osrs/autoclicker/click_server.py
Executable file
@@ -0,0 +1,183 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Simple web server for autoclicker visualization
|
||||
Serves the HTML visualizer and provides API endpoints for CSV log files
|
||||
Converts CSV to JSON for the frontend
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import csv
|
||||
import http.server
|
||||
import socketserver
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
from datetime import datetime
|
||||
|
||||
PORT = 8661
|
||||
LOG_DIR = "/tmp/autoclicker_logs"
|
||||
|
||||
class ClickServerHandler(http.server.SimpleHTTPRequestHandler):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, directory=".", **kwargs)
|
||||
|
||||
def do_GET(self):
|
||||
# Handle API requests
|
||||
if self.path.startswith('/api/'):
|
||||
self.handle_api_request()
|
||||
return
|
||||
|
||||
# Serve the visualizer HTML for root path
|
||||
if self.path == '/' or self.path == '/index.html':
|
||||
self.path = '/click-visualizer.html'
|
||||
|
||||
# Serve static files normally
|
||||
return super().do_GET()
|
||||
|
||||
def handle_api_request(self):
|
||||
"""Handle API endpoints for log file operations"""
|
||||
try:
|
||||
if self.path.startswith('/api/logs'):
|
||||
self.handle_logs_request()
|
||||
elif self.path.startswith('/api/log/'):
|
||||
self.handle_log_file_request()
|
||||
else:
|
||||
self.send_error(404, "API endpoint not found")
|
||||
except Exception as e:
|
||||
self.send_error(500, f"Server error: {str(e)}")
|
||||
|
||||
def handle_logs_request(self):
|
||||
"""Return list of available log files"""
|
||||
if not os.path.exists(LOG_DIR):
|
||||
self.send_json_response({"error": "Log directory not found", "logs": []})
|
||||
return
|
||||
|
||||
try:
|
||||
files = []
|
||||
for filename in sorted(os.listdir(LOG_DIR), reverse=True):
|
||||
if filename.endswith('.csv'):
|
||||
filepath = os.path.join(LOG_DIR, filename)
|
||||
stat = os.stat(filepath)
|
||||
|
||||
# Check if CSV is valid
|
||||
if self.is_valid_csv(filepath):
|
||||
files.append({
|
||||
"name": filename,
|
||||
"size": self.format_file_size(stat.st_size),
|
||||
"date": datetime.fromtimestamp(stat.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
|
||||
"path": filename,
|
||||
"valid": True
|
||||
})
|
||||
else:
|
||||
files.append({
|
||||
"name": filename,
|
||||
"size": self.format_file_size(stat.st_size),
|
||||
"date": datetime.fromtimestamp(stat.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
|
||||
"path": filename,
|
||||
"valid": False,
|
||||
"error": "Invalid CSV format"
|
||||
})
|
||||
|
||||
self.send_json_response({"logs": files})
|
||||
except Exception as e:
|
||||
self.send_json_response({"error": str(e), "logs": []})
|
||||
|
||||
def handle_log_file_request(self):
|
||||
"""Return contents of a specific log file as JSON"""
|
||||
# Extract filename from path
|
||||
parts = self.path.split('/')
|
||||
if len(parts) < 4:
|
||||
self.send_error(400, "Invalid log file request")
|
||||
return
|
||||
|
||||
filename = parts[3]
|
||||
filepath = os.path.join(LOG_DIR, filename)
|
||||
|
||||
if not os.path.exists(filepath):
|
||||
self.send_error(404, "Log file not found")
|
||||
return
|
||||
|
||||
try:
|
||||
if filename.endswith('.csv'):
|
||||
data = self.csv_to_json(filepath)
|
||||
if data is None:
|
||||
self.send_json_response({
|
||||
"error": "Failed to parse CSV file",
|
||||
"filename": filename,
|
||||
"valid": False
|
||||
})
|
||||
return
|
||||
|
||||
self.send_json_response({
|
||||
"data": data,
|
||||
"filename": filename,
|
||||
"valid": True
|
||||
})
|
||||
else:
|
||||
self.send_error(400, "Unsupported file format")
|
||||
except Exception as e:
|
||||
self.send_error(500, f"Error reading log file: {str(e)}")
|
||||
|
||||
def csv_to_json(self, filepath):
|
||||
"""Convert CSV file to JSON array"""
|
||||
try:
|
||||
data = []
|
||||
with open(filepath, 'r') as f:
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
data.append({
|
||||
"click": int(row["click_number"]),
|
||||
"delay_ms": int(row["delay_ms"]),
|
||||
"timestamp": int(row["timestamp"])
|
||||
})
|
||||
return data
|
||||
except Exception as e:
|
||||
print(f"Error parsing CSV {filepath}: {str(e)}")
|
||||
return None
|
||||
|
||||
def is_valid_csv(self, filepath):
|
||||
"""Check if a file contains valid CSV"""
|
||||
try:
|
||||
with open(filepath, 'r') as f:
|
||||
reader = csv.DictReader(f)
|
||||
# Try to read first row
|
||||
next(reader)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def send_json_response(self, data):
|
||||
"""Send JSON response with proper headers"""
|
||||
response = json.dumps(data, indent=2)
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.send_header('Access-Control-Allow-Origin', '*')
|
||||
self.send_header('Content-Length', str(len(response)))
|
||||
self.end_headers()
|
||||
self.wfile.write(response.encode())
|
||||
|
||||
def format_file_size(self, size_bytes):
|
||||
"""Format file size in human-readable format"""
|
||||
for unit in ['B', 'KB', 'MB', 'GB']:
|
||||
if size_bytes < 1024.0:
|
||||
return f"{size_bytes:.1f} {unit}"
|
||||
size_bytes /= 1024.0
|
||||
return f"{size_bytes:.1f} TB"
|
||||
|
||||
def run_server():
|
||||
"""Start the web server"""
|
||||
print(f"Starting autoclicker visualization server on port {PORT}")
|
||||
print(f"Log directory: {LOG_DIR}")
|
||||
print("Open your browser to: http://localhost:8661")
|
||||
print("Press Ctrl+C to stop the server")
|
||||
|
||||
try:
|
||||
with socketserver.TCPServer(("", PORT), ClickServerHandler) as httpd:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
print("\nServer stopped")
|
||||
except Exception as e:
|
||||
print(f"Server error: {str(e)}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_server()
|
||||
Reference in New Issue
Block a user