Release 0.9.0
345	bot/resources/services/analyze_callback_usage.py	(executable file, 345 lines added)
@@ -0,0 +1,345 @@
#!/usr/bin/env python3
"""
Callback Dict Usage Analyzer

Scans the codebase for all callback_dict usage:
- dom.data.upsert() calls
- dom.data.append() calls
- dom.data.remove_key_by_path() calls
- widget_meta["handlers"] registrations

Generates a complete inventory for Phase 0 of the refactoring plan.
"""

import os
import re
from pathlib import Path
from typing import List, Dict, Tuple
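# Illustrative shapes of the call sites this script looks for. These examples are
# hypothetical (reconstructed from the regexes below); the real signatures live in
# the modules under bot/modules:
#
#   module.dom.data.upsert({...}, min_callback_level=2, max_callback_level=5)
#   module.dom.data.append(...)
#   module.dom.data.remove_key_by_path(...)
#   widget_meta = {"handlers": {"some/path/pattern": some_handler_function}}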
class CallbackAnalyzer:
    def __init__(self, root_dir: str):
        self.root_dir = Path(root_dir)
        self.results = {
            "upsert_calls": [],
            "append_calls": [],
            "remove_calls": [],
            "handler_registrations": []
        }

    def analyze(self):
        """Run all analysis passes."""
        print("🔍 Analyzing callback_dict usage...")
        print(f"📁 Root directory: {self.root_dir}\n")

        # Find all Python files in bot/modules
        modules_dir = self.root_dir / "bot" / "modules"
        if not modules_dir.exists():
            print(f"❌ Error: {modules_dir} does not exist!")
            return

        python_files = list(modules_dir.rglob("*.py"))
        print(f"📄 Found {len(python_files)} Python files\n")

        # Analyze each file
        for py_file in python_files:
            self._analyze_file(py_file)

        # Print results
        self._print_results()

        # Generate markdown report
        self._generate_report()

    def _analyze_file(self, file_path: Path):
        """Analyze a single Python file."""
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
                lines = content.split('\n')

            relative_path = file_path.relative_to(self.root_dir)

            # Find upsert calls
            self._find_upsert_calls(relative_path, content, lines)

            # Find append calls
            self._find_append_calls(relative_path, content, lines)

            # Find remove calls
            self._find_remove_calls(relative_path, content, lines)

            # Find handler registrations
            self._find_handler_registrations(relative_path, content, lines)

        except Exception as e:
            print(f"⚠️ Error analyzing {file_path}: {e}")

    def _find_upsert_calls(self, file_path: Path, content: str, lines: List[str]):
        """Find all dom.data.upsert() calls."""
        # Pattern: module.dom.data.upsert( or self.dom.data.upsert(
        pattern = r'(module|self)\.dom\.data\.upsert\('

        for match in re.finditer(pattern, content):
            line_num = content[:match.start()].count('\n') + 1

            # Try to extract callback levels
            min_level, max_level = self._extract_callback_levels(lines, line_num)

            # Try to determine data depth
            depth = self._estimate_data_depth(lines, line_num)

            self.results["upsert_calls"].append({
                "file": str(file_path),
                "line": line_num,
                "min_callback_level": min_level,
                "max_callback_level": max_level,
                "estimated_depth": depth
            })

    def _find_append_calls(self, file_path: Path, content: str, lines: List[str]):
        """Find all dom.data.append() calls."""
        pattern = r'(module|self)\.dom\.data\.append\('

        for match in re.finditer(pattern, content):
            line_num = content[:match.start()].count('\n') + 1

            self.results["append_calls"].append({
                "file": str(file_path),
                "line": line_num
            })

    def _find_remove_calls(self, file_path: Path, content: str, lines: List[str]):
        """Find all dom.data.remove_key_by_path() calls."""
        pattern = r'(module|self)\.dom\.data\.remove_key_by_path\('

        for match in re.finditer(pattern, content):
            line_num = content[:match.start()].count('\n') + 1

            self.results["remove_calls"].append({
                "file": str(file_path),
                "line": line_num
            })

    def _find_handler_registrations(self, file_path: Path, content: str, lines: List[str]):
        """Find all widget_meta handler registrations."""
        # Look for widget_meta = { ... "handlers": { ... } ... }

        if 'widget_meta' not in content:
            return

        if '"handlers"' not in content and "'handlers'" not in content:
            return

        # Find line with handlers dict
        in_handlers = False
        handlers_start = None

        for i, line in enumerate(lines):
            if '"handlers"' in line or "'handlers'" in line:
                in_handlers = True
                handlers_start = i + 1
                continue

            if in_handlers:
                # Look for path patterns
                # Pattern: "path/pattern": handler_function,
                path_match = re.search(r'["\']([^"\']+)["\']:\s*(\w+)', line)

                if path_match:
                    path_pattern = path_match.group(1)
                    handler_name = path_match.group(2)

                    # Calculate depth
                    depth = path_pattern.count('/')

                    self.results["handler_registrations"].append({
                        "file": str(file_path),
                        "line": i + 1,
                        "path_pattern": path_pattern,
                        "handler_function": handler_name,
                        "depth": depth
                    })

                # Check if we've left the handlers dict
                if '}' in line and ',' not in line:
                    in_handlers = False

    def _extract_callback_levels(self, lines: List[str], start_line: int) -> Tuple[str, str]:
        """Try to extract min_callback_level and max_callback_level from upsert call."""
        min_level = "None"
        max_level = "None"

        # Look at next ~10 lines for callback level params
        for i in range(start_line - 1, min(start_line + 10, len(lines))):
            line = lines[i]

            if 'min_callback_level' in line:
                match = re.search(r'min_callback_level\s*=\s*(\d+|None)', line)
                if match:
                    min_level = match.group(1)

            if 'max_callback_level' in line:
                match = re.search(r'max_callback_level\s*=\s*(\d+|None)', line)
                if match:
                    max_level = match.group(1)

            # Stop at closing paren
            if ')' in line:
                break

        return min_level, max_level

    def _estimate_data_depth(self, lines: List[str], start_line: int) -> int:
        """Estimate the depth of data being upserted by counting dict nesting."""
        depth = 0
        brace_count = 0

        # Look backwards for opening brace
        for i in range(start_line - 1, max(0, start_line - 30), -1):
            line = lines[i]

            if 'upsert({' in line or 'upsert( {' in line:
                # Count colons/keys in following lines until we reach reasonable depth
                for j in range(i, min(i + 50, len(lines))):
                    check_line = lines[j]

                    # Count dictionary depth by tracking braces
                    brace_count += check_line.count('{')
                    brace_count -= check_line.count('}')

                    if brace_count < 0:
                        break

                    # Rough estimate: each key-value pair increases depth
                    if '":' in check_line or "\':" in check_line:
                        depth += 1

                break

        # Depth is usually around 4-6 for locations/players
        return min(depth, 10)  # Cap at reasonable number

    def _print_results(self):
        """Print analysis results to console."""
        print("\n" + "=" * 70)
        print("📊 ANALYSIS RESULTS")
        print("=" * 70 + "\n")

        print(f"🔹 Upsert Calls: {len(self.results['upsert_calls'])}")
        print(f"🔹 Append Calls: {len(self.results['append_calls'])}")
        print(f"🔹 Remove Calls: {len(self.results['remove_calls'])}")
        print(f"🔹 Handler Registrations: {len(self.results['handler_registrations'])}")

        print("\n" + "-" * 70)
        print("UPSERT CALLS BY CALLBACK LEVELS:")
        print("-" * 70)

        # Group by callback levels
        level_groups = {}
        for call in self.results["upsert_calls"]:
            key = f"min={call['min_callback_level']}, max={call['max_callback_level']}"
            if key not in level_groups:
                level_groups[key] = []
            level_groups[key].append(call)

        for levels, calls in sorted(level_groups.items()):
            print(f"\n{levels}: {len(calls)} calls")
            for call in calls:
                print(f"  📄 {call['file']}:{call['line']}")

        print("\n" + "-" * 70)
        print("HANDLER REGISTRATIONS BY DEPTH:")
        print("-" * 70)

        # Group by depth
        depth_groups = {}
        for handler in self.results["handler_registrations"]:
            depth = handler['depth']
            if depth not in depth_groups:
                depth_groups[depth] = []
            depth_groups[depth].append(handler)

        for depth in sorted(depth_groups.keys()):
            handlers = depth_groups[depth]
            print(f"\nDepth {depth}: {len(handlers)} handlers")
            for h in handlers:
                print(f"  📄 {h['file']}:{h['line']}")
                print(f"     Pattern: {h['path_pattern']}")
                print(f"     Handler: {h['handler_function']}")

    def _generate_report(self):
        """Generate markdown report."""
        report_path = self.root_dir / "CALLBACK_DICT_INVENTORY.md"

        with open(report_path, 'w', encoding='utf-8') as f:
            f.write("# Callback Dict Usage Inventory\n\n")
            f.write("Generated by analyze_callback_usage.py\n\n")

            f.write("## Summary\n\n")
            f.write(f"- **Upsert Calls:** {len(self.results['upsert_calls'])}\n")
            f.write(f"- **Append Calls:** {len(self.results['append_calls'])}\n")
            f.write(f"- **Remove Calls:** {len(self.results['remove_calls'])}\n")
            f.write(f"- **Handler Registrations:** {len(self.results['handler_registrations'])}\n\n")

            f.write("---\n\n")

            f.write("## Upsert Calls\n\n")
            f.write("| File | Line | Min Level | Max Level | Est. Depth |\n")
            f.write("|------|------|-----------|-----------|------------|\n")
            for call in self.results["upsert_calls"]:
                f.write(f"| {call['file']} | {call['line']} | "
                        f"{call['min_callback_level']} | {call['max_callback_level']} | "
                        f"{call['estimated_depth']} |\n")

            f.write("\n---\n\n")

            f.write("## Append Calls\n\n")
            f.write("| File | Line |\n")
            f.write("|------|------|\n")
            for call in self.results["append_calls"]:
                f.write(f"| {call['file']} | {call['line']} |\n")

            f.write("\n---\n\n")

            f.write("## Remove Calls\n\n")
            f.write("| File | Line |\n")
            f.write("|------|------|\n")
            for call in self.results["remove_calls"]:
                f.write(f"| {call['file']} | {call['line']} |\n")

            f.write("\n---\n\n")

            f.write("## Handler Registrations\n\n")
            f.write("| File | Line | Depth | Path Pattern | Handler Function |\n")
            f.write("|------|------|-------|--------------|------------------|\n")
            for handler in self.results["handler_registrations"]:
                f.write(f"| {handler['file']} | {handler['line']} | "
                        f"{handler['depth']} | `{handler['path_pattern']}` | "
                        f"`{handler['handler_function']}` |\n")

            f.write("\n---\n\n")
            f.write("## Next Steps\n\n")
            f.write("1. Review each upsert call - does it send complete or partial data?\n")
            f.write("2. Review each handler - does it expect complete or partial data?\n")
            f.write("3. Identify mismatches that will benefit from enrichment\n")
            f.write("4. Plan which files need updating in Phase 2 and Phase 3\n")

        print(f"\n✅ Report generated: {report_path}")


if __name__ == "__main__":
    import sys

    if len(sys.argv) > 1:
        root = sys.argv[1]
    else:
        # Assume we're in the scripts directory
        root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    analyzer = CallbackAnalyzer(root)
    analyzer.analyze()

    print("\n✨ Analysis complete!")
    print("📋 Review CALLBACK_DICT_INVENTORY.md for detailed results")
    print("📖 See CALLBACK_DICT_REFACTOR_PLAN.md for next steps\n")
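A minimal usage sketch for the analyzer (the flat import and the working directory are assumptions; it is equivalent to running python3 bot/resources/services/analyze_callback_usage.py . from the repository root):

    # Hypothetical driver: point the analyzer at the repository root so bot/modules resolves.
    from analyze_callback_usage import CallbackAnalyzer

    analyzer = CallbackAnalyzer(".")  # "." = repository root containing bot/modules
    analyzer.analyze()                # prints the summary and writes CALLBACK_DICT_INVENTORY.md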
71	bot/resources/services/debug_telnet.py	(normal file, 71 lines added)
@@ -0,0 +1,71 @@
#!/usr/bin/env python3
"""
Debug script to see the raw telnet responses from a 7D2D server.
This helps update the regex patterns in the action files.
"""
import telnetlib
import json
import time
import re

# Load config
with open('bot/options/module_telnet.json', 'r') as f:
    config = json.load(f)

HOST = config['host']
PORT = config['port']
PASSWORD = config['password']

print(f"Connecting to {HOST}:{PORT}...")

# Connect
tn = telnetlib.Telnet(HOST, PORT, timeout=5)

# Wait for password prompt
response = tn.read_until(b"Please enter password:", timeout=3)
print("Got password prompt")

# Send password
tn.write(PASSWORD.encode('ascii') + b"\r\n")

# Wait for welcome message
time.sleep(1)
welcome = tn.read_very_eager().decode('utf-8')
print("Connected!\n")

# Commands to test
commands = [
    'admin list',
    'lp',
    'gettime',
    'getgamepref',
    'getgamestat',
    'listents'
]

for cmd in commands:
    print(f"\n{'='*80}")
    print(f"COMMAND: {cmd}")
    print('='*80)

    # Send command
    tn.write(cmd.encode('ascii') + b"\r\n")

    # Wait a bit for response
    time.sleep(2)

    # Read response
    response = tn.read_very_eager().decode('utf-8')

    print("RAW RESPONSE:")
    print(repr(response))  # Show with escape characters
    print("\nFORMATTED:")
    print(response)
    print()

tn.write(b"exit\r\n")
tn.close()

print("\n" + "="*80)
print("Done! Use these responses to update the regex patterns.")
print("="*80)
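Note that telnetlib is deprecated and was removed from the Python standard library in 3.13, so on newer interpreters this debug script needs a replacement transport. A minimal sketch of the same handshake over a raw socket (field names taken from module_telnet.json above; the timing values are assumptions):

    import json
    import socket
    import time

    with open('bot/options/module_telnet.json', 'r') as f:
        cfg = json.load(f)

    # Same flow as the telnetlib version: connect, wait for the password prompt,
    # authenticate, send one command, dump whatever the server returns.
    sock = socket.create_connection((cfg['host'], cfg['port']), timeout=5)
    time.sleep(1)
    sock.recv(4096)                                    # password prompt
    sock.sendall(cfg['password'].encode('ascii') + b"\r\n")
    time.sleep(1)
    sock.recv(4096)                                    # welcome banner
    sock.sendall(b"gettime\r\n")
    time.sleep(2)
    print(sock.recv(65536).decode('utf-8', errors='ignore'))
    sock.sendall(b"exit\r\n")
    sock.close()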
179	bot/resources/services/test_all_commands.py	(normal file, 179 lines added)
@@ -0,0 +1,179 @@
#!/usr/bin/env python3
"""
Comprehensive telnet command tester for a 7D2D server.
Tests all bot commands and validates the regex patterns.
"""
import telnetlib
import json
import time
import re
from pathlib import Path

# Load config
config_path = Path(__file__).parent / 'bot' / 'options' / 'module_telnet.json'
if config_path.exists():
    with open(config_path, 'r') as f:
        config = json.load(f)
else:
    # Manual config if file doesn't exist
    config = {
        'host': input('Server IP: '),
        'port': int(input('Server Port: ')),
        'password': input('Telnet Password: ')
    }

print(f"\n{'='*80}")
print(f"Connecting to {config['host']}:{config['port']}")
print(f"{'='*80}\n")

# Connect
tn = telnetlib.Telnet(config['host'], config['port'], timeout=10)

# Wait for password prompt
time.sleep(0.5)
output = tn.read_very_eager().decode('ascii', errors='ignore')
print('[CONNECTION] Established')

# Send password
tn.write((config['password'] + '\n').encode('ascii'))
time.sleep(0.5)
output = tn.read_very_eager().decode('ascii', errors='ignore')
print('[AUTH] Authenticated\n')

# Test commands with their expected regex patterns
commands = [
    {
        'name': 'admin list',
        'command': 'admin list',
        'regex': r"Executing\scommand\s\'admin list\'\sby\sTelnet\sfrom\s(?P<called_by>.*?)\r?\n"
                 r"(?P<raw_adminlist>(?:Defined User Permissions\:.*?(?=Defined Group Permissions|$)))",
        'wait': 1,
        'description': 'Get admin list'
    },
    {
        'name': 'lp (getplayers)',
        'command': 'lp',
        'regex': r"Executing\scommand\s\'lp\'\sby\sTelnet\sfrom\s"
                 r"(?P<called_by>.*?)\r?\n"
                 r"(?P<raw_playerdata>[\s\S]*?)"
                 r"Total\sof\s(?P<player_count>\d{1,2})\sin\sthe\sgame",
        'wait': 1,
        'description': 'Get player list'
    },
    {
        'name': 'gettime',
        'command': 'gettime',
        'regex': r"Day\s(?P<day>\d{1,5}),\s(?P<hour>\d{1,2}):(?P<minute>\d{1,2})",
        'wait': 1,
        'description': 'Get game time'
    },
    {
        'name': 'listents (getentities)',
        'command': 'listents',
        'regex': r"Executing\scommand\s\'listents\'\sby\sTelnet\sfrom\s(?P<called_by>.*?)\r?\n"
                 r"(?P<raw_entity_data>[\s\S]*?)"
                 r"Total\sof\s(?P<entity_count>\d{1,3})\sin\sthe\sgame",
        'wait': 1,
        'description': 'Get entity list'
    },
    {
        'name': 'getgamepref',
        'command': 'getgamepref',
        'regex': r"Executing\scommand\s\'getgamepref\'\sby\sTelnet\sfrom\s(?P<called_by>.*?)\r?\n"
                 r"(?P<raw_gameprefs>(?:GamePref\..*?\r?\n)+)",
        'wait': 2,
        'description': 'Get game preferences'
    },
    {
        'name': 'getgamestat',
        'command': 'getgamestat',
        'regex': r"Executing\scommand\s\'getgamestat\'\sby\sTelnet\sfrom\s(?P<called_by>.*?)\r?\n"
                 r"(?P<raw_gamestats>(?:GameStat\..*?\r?\n)+)",
        'wait': 2,
        'description': 'Get game statistics'
    },
    {
        'name': 'version',
        'command': 'version',
        'regex': None,  # Just check raw output
        'wait': 1,
        'description': 'Get server version'
    },
    {
        'name': 'help',
        'command': 'help',
        'regex': None,  # Just check raw output
        'wait': 2,
        'description': 'Get available commands'
    }
]

results = {
    'passed': [],
    'failed': [],
    'no_regex': []
}

for test in commands:
    print(f"\n{'='*80}")
    print(f"TEST: {test['name']}")
    print(f"Description: {test['description']}")
    print(f"{'='*80}")

    # Send command
    print(f"\n>>> Sending: {test['command']}")
    tn.write((test['command'] + '\n').encode('ascii'))

    # Wait for response
    time.sleep(test['wait'])
    output = tn.read_very_eager().decode('ascii', errors='ignore')

    # Show raw output
    print(f"\n--- RAW OUTPUT (repr) ---")
    print(repr(output))
    print(f"\n--- RAW OUTPUT (formatted) ---")
    print(output)

    # Test regex if provided
    if test['regex']:
        print(f"\n--- REGEX TEST ---")
        print(f"Pattern: {test['regex'][:100]}...")

        matches = list(re.finditer(test['regex'], output, re.MULTILINE | re.DOTALL))

        if matches:
            print(f"✓ REGEX MATCHED! ({len(matches)} match(es))")
            for i, match in enumerate(matches, 1):
                print(f"\nMatch {i}:")
                for group_name, group_value in match.groupdict().items():
                    value_preview = repr(group_value)[:100]
                    print(f"  {group_name}: {value_preview}")
            results['passed'].append(test['name'])
        else:
            print(f"✗ REGEX FAILED - NO MATCH!")
            results['failed'].append(test['name'])
    else:
        print(f"\n--- NO REGEX TEST (raw output only) ---")
        results['no_regex'].append(test['name'])

    print(f"\n{'='*80}\n")
    time.sleep(0.5)  # Small delay between commands

# Close connection
tn.close()

# Summary
print(f"\n\n{'='*80}")
print("SUMMARY")
print(f"{'='*80}")
print(f"✓ Passed: {len(results['passed'])} - {', '.join(results['passed']) if results['passed'] else 'none'}")
print(f"✗ Failed: {len(results['failed'])} - {', '.join(results['failed']) if results['failed'] else 'none'}")
print(f"- No test: {len(results['no_regex'])} - {', '.join(results['no_regex']) if results['no_regex'] else 'none'}")
print(f"{'='*80}\n")

if results['failed']:
    print("⚠️ SOME TESTS FAILED - Regex patterns need to be fixed!")
    exit(1)
else:
    print("✓ All regex tests passed!")
    exit(0)
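The same regex patterns can also be checked offline against a saved transcript, which is handy when the server is unreachable. A small sketch using the gettime pattern from the command table above (the sample line is illustrative, not captured output):

    import re

    sample = "Day 12, 07:45\r\n"  # illustrative response line
    pattern = r"Day\s(?P<day>\d{1,5}),\s(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"

    match = re.search(pattern, sample)
    if match:
        print(match.groupdict())  # {'day': '12', 'hour': '07', 'minute': '45'}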