Add building pipeline
Some checks failed
Continuous Integration / Code Formatting (push) Successful in 29s
Continuous Integration / Code Quality Check (push) Successful in 27s
Continuous Integration / Test Execution (push) Failing after 16s
Continuous Integration / CI Summary (push) Failing after 3s
Continuous Integration / Code Formatting (pull_request) Successful in 26s
Continuous Integration / Code Quality Check (pull_request) Successful in 26s
Continuous Integration / Test Execution (pull_request) Failing after 17s
Continuous Integration / CI Summary (pull_request) Failing after 5s
@@ -25,6 +25,7 @@ NOTE: Handles "successful but noisy" linter output such as
"""

import argparse
import asyncio
import json
import os
import re
@@ -35,6 +36,11 @@ import warnings
from pathlib import Path
from typing import Dict, List, Tuple

try:
    import aiofiles
except ImportError:
    aiofiles = None

try:
    import yaml
except ImportError:
@@ -181,6 +187,71 @@ def run_command(cmd: List[str], cwd: Path, timeout: int = 30) -> subprocess.Comp
    return result


async def run_command_async(cmd: List[str], cwd: Path, timeout: int = 30) -> Tuple[int, str, str]:
    """
    Execute a shell command asynchronously with error handling and output filtering.

    Filters out gdtoolkit's pkg_resources deprecation warnings.

    Args:
        cmd: Command and arguments to execute
        cwd: Working directory for command execution
        timeout: Maximum execution time in seconds (default: 30s)

    Returns:
        Tuple of (returncode, stdout, stderr)
    """
    # Suppress pkg_resources deprecation warnings in subprocesses
    env = os.environ.copy()
    env['PYTHONWARNINGS'] = 'ignore::UserWarning:pkg_resources'

    try:
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            cwd=cwd,
            env=env
        )

        stdout, stderr = await asyncio.wait_for(
            process.communicate(),
            timeout=timeout
        )

        stdout_text = stdout.decode('utf-8', errors='replace')
        stderr_text = stderr.decode('utf-8', errors='replace')

        # Filter out pkg_resources deprecation warnings from the output
        def filter_warnings(text: str) -> str:
            if not text:
                return text
            lines = text.split('\n')
            filtered_lines = []
            skip_next = False

            for line in lines:
                if skip_next:
                    skip_next = False
                    continue
                if 'pkg_resources is deprecated' in line:
                    skip_next = True  # Skip the next line which contains "import pkg_resources"
                    continue
                if 'import pkg_resources' in line:
                    continue
                filtered_lines.append(line)

            return '\n'.join(filtered_lines)

        return process.returncode, filter_warnings(stdout_text), filter_warnings(stderr_text)

    except asyncio.TimeoutError:
        if process.returncode is None:
            process.kill()
            await process.wait()
        raise subprocess.TimeoutExpired(cmd, timeout)
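
For reference, a minimal sketch of how this helper can be driven from synchronous code. The `_demo` wrapper and the interpreter `--version` call are illustrative only, not part of this commit, and assume `run_command_async` is available in the same module.

# Illustrative usage sketch (not part of the commit): run a single command
# through run_command_async from a synchronous entry point.
import asyncio
import sys
from pathlib import Path


async def _demo() -> None:
    # Any executable on PATH works; the current interpreter is a safe choice.
    returncode, stdout, stderr = await run_command_async(
        [sys.executable, "--version"], cwd=Path.cwd(), timeout=30
    )
    print(returncode, (stdout or stderr).strip())


if __name__ == "__main__":
    asyncio.run(_demo())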


def should_skip_file(file_path: Path) -> bool:
    """Check if file should be skipped."""
    return file_path.name == "TestHelper.gd"
@@ -433,6 +504,25 @@ def validate_yaml_file(file_path: Path) -> Tuple[bool, str]:
        return False, f"Error reading file: {e}"


async def validate_yaml_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a YAML file asynchronously."""
    if yaml is None:
        return False, "PyYAML not installed. Install with: pip install PyYAML"

    if aiofiles is None:
        return validate_yaml_file(file_path)

    try:
        async with aiofiles.open(file_path, 'r', encoding='utf-8') as f:
            content = await f.read()
        yaml.safe_load(content)
        return True, ""
    except yaml.YAMLError as e:
        return False, f"YAML syntax error: {e}"
    except Exception as e:
        return False, f"Error reading file: {e}"


def validate_toml_file(file_path: Path) -> Tuple[bool, str]:
    """Validate a TOML file."""
    if tomllib is None:
@@ -448,6 +538,25 @@ def validate_toml_file(file_path: Path) -> Tuple[bool, str]:
        return False, f"Error reading file: {e}"


async def validate_toml_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a TOML file asynchronously."""
    if tomllib is None:
        return False, "tomllib/tomli not available. For Python 3.11+, it's built-in. For older versions: pip install tomli"

    if aiofiles is None:
        return validate_toml_file(file_path)

    try:
        async with aiofiles.open(file_path, 'rb') as f:
            content = await f.read()
        tomllib.loads(content.decode('utf-8'))
        return True, ""
    except tomllib.TOMLDecodeError as e:
        return False, f"TOML syntax error: {e}"
    except Exception as e:
        return False, f"Error reading file: {e}"


def validate_json_file(file_path: Path) -> Tuple[bool, str]:
    """Validate a JSON file."""
    try:
@@ -460,6 +569,251 @@ def validate_json_file(file_path: Path) -> Tuple[bool, str]:
        return False, f"Error reading file: {e}"


async def validate_json_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a JSON file asynchronously."""
    if aiofiles is None:
        return validate_json_file(file_path)

    try:
        async with aiofiles.open(file_path, 'r', encoding='utf-8') as f:
            content = await f.read()
        json.loads(content)
        return True, ""
    except json.JSONDecodeError as e:
        return False, f"JSON syntax error: {e}"
    except Exception as e:
        return False, f"Error reading file: {e}"
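
For context, a hedged sketch of how the three async validators could be fanned out together with asyncio.gather; the file names below are placeholders and this driver is not part of the commit.

# Illustrative sketch (not part of the commit): validate several config files
# concurrently with the async validators defined above.
import asyncio
from pathlib import Path


async def _validate_configs() -> None:
    paths = [Path("ci.yaml"), Path("pyproject.toml"), Path("settings.json")]  # placeholder paths
    validators = [validate_yaml_file_async, validate_toml_file_async, validate_json_file_async]
    results = await asyncio.gather(*(v(p) for v, p in zip(validators, paths)))
    for path, (ok, error) in zip(paths, results):
        print(f"{path}: {'OK' if ok else error}")


if __name__ == "__main__":
    asyncio.run(_validate_configs())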


async def process_lint_file_async(gd_file: Path, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str, str]:
    """Process a single file for linting asynchronously."""
    async with semaphore:
        relative_path = gd_file.relative_to(project_root)
        if not silent and not yaml_output:
            file_msg = f"📄 Linting: {relative_path.name}"
            colored_file = Colors.colorize(file_msg, Colors.CYAN)
            print(colored_file)

        if should_skip_file(gd_file):
            if not silent and not yaml_output:
                print_skip_message("gdlint")
            return True, "", str(relative_path)

        try:
            returncode, stdout, stderr = await run_command_async(["gdlint", str(gd_file)], project_root)
            output = (stdout + stderr).strip()

            if returncode == 0:
                # If output is "no problems" (or similar), treat as clean.
                if _is_successful_linter_output(output):
                    if not yaml_output:
                        print_result(True, "", silent)
                    return True, "", str(relative_path)
                else:
                    if not yaml_output:
                        print_result(True, output, silent)
                    return True, output, str(relative_path)
            else:
                if not yaml_output:
                    print_result(False, output, silent)
                return False, output, str(relative_path)

        except FileNotFoundError:
            if not silent and not yaml_output:
                print(" ❌ ERROR: gdlint not found")
            return False, "gdlint not found", str(relative_path)
        except Exception as e:
            if not silent and not yaml_output:
                print(f" ❌ ERROR: {e}")
            return False, str(e), str(relative_path)
        finally:
            if not silent and not yaml_output:
                print()


async def run_lint_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run gdlint on all GDScript files asynchronously."""
    if not yaml_output:
        print_header("🔍 GDScript Linter (Async)", silent)

    gd_files = get_gd_files(project_root)
    if not silent and not yaml_output:
        count_msg = f"Found {len(gd_files)} GDScript files to lint."
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(f"{colored_count}\n")

    clean_files = warning_files = error_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, len(gd_files)))

    # Process files concurrently
    tasks = [process_lint_file_async(gd_file, project_root, semaphore, silent, yaml_output) for gd_file in gd_files]
    results = await asyncio.gather(*tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            error_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, output, relative_path = result
            if success:
                if not output:
                    clean_files += 1
                else:
                    warning_files += 1
            else:
                error_files += 1
                failed_paths.append(relative_path)

    # Summary
    stats = {
        "Total files": len(gd_files),
        "Clean files": clean_files,
        "Files with warnings": warning_files,
        "Files with errors": error_files
    }

    success = error_files == 0

    if yaml_output:
        output_yaml_results("lint", {**stats, "failed_paths": failed_paths}, success)
    else:
        print_summary("Linting Summary", stats, silent)
        if not silent:
            print()
            if not success:
                msg = "❌ Linting FAILED - Please fix the errors above"
                colored_msg = Colors.colorize(msg, Colors.RED + Colors.BOLD)
                print(colored_msg)
            elif warning_files > 0:
                msg = "⚠️ Linting PASSED with warnings - Consider fixing them"
                colored_msg = Colors.colorize(msg, Colors.YELLOW + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All GDScript files passed linting!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show failed files
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
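
A hedged sketch of a synchronous entry point around the async lint pass; the stats keys mirror the dictionary built above, but the wrapper and exit-code mapping are illustrative and not part of the commit.

# Illustrative sketch (not part of the commit): drive run_lint_async from a
# synchronous caller and turn its result into a process exit code.
import asyncio
import sys
from pathlib import Path


def lint_entry_point(project_root: Path) -> int:
    success, stats = asyncio.run(run_lint_async(project_root, silent=True))
    print(f"clean={stats['Clean files']} warnings={stats['Files with warnings']} errors={stats['Files with errors']}")
    return 0 if success else 1


if __name__ == "__main__":
    sys.exit(lint_entry_point(Path.cwd()))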


async def process_format_file_async(gd_file: Path, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str]:
    """Process a single file for formatting asynchronously."""
    async with semaphore:
        relative_path = gd_file.relative_to(project_root)
        if not silent and not yaml_output:
            file_msg = f"🎯 Formatting: {relative_path.name}"
            colored_file = Colors.colorize(file_msg, Colors.CYAN)
            print(colored_file)

        if should_skip_file(gd_file):
            if not silent and not yaml_output:
                print_skip_message("gdformat")
            return True, str(relative_path)

        try:
            returncode, stdout, stderr = await run_command_async(["gdformat", str(gd_file)], project_root)

            if returncode == 0:
                if not silent and not yaml_output:
                    success_msg = "✅ Success"
                    colored_success = Colors.colorize(success_msg, Colors.GREEN)
                    print(f" {colored_success}")
                return True, str(relative_path)
            else:
                if not silent and not yaml_output:
                    fail_msg = f"❌ FAILED: {relative_path}"
                    colored_fail = Colors.colorize(fail_msg, Colors.RED)
                    print(f" {colored_fail}")
                    output = (stdout + stderr).strip()
                    if output:
                        colored_output = Colors.colorize(output, Colors.RED)
                        print(f" {colored_output}")
                return False, str(relative_path)

        except FileNotFoundError:
            if not silent and not yaml_output:
                print(" ❌ ERROR: gdformat not found")
            return False, str(relative_path)
        except Exception as e:
            if not silent and not yaml_output:
                print(f" ❌ ERROR: {e}")
            return False, str(relative_path)
        finally:
            if not silent and not yaml_output:
                print()


async def run_format_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run gdformat on all GDScript files asynchronously."""
    if not yaml_output:
        print_header("🎨 GDScript Formatter (Async)", silent)

    gd_files = get_gd_files(project_root)
    if not silent and not yaml_output:
        count_msg = f"Found {len(gd_files)} GDScript files to format."
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(f"{colored_count}\n")

    formatted_files = failed_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, len(gd_files)))

    # Process files concurrently
    tasks = [process_format_file_async(gd_file, project_root, semaphore, silent, yaml_output) for gd_file in gd_files]
    results = await asyncio.gather(*tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            failed_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, relative_path = result
            if success:
                formatted_files += 1
            else:
                failed_files += 1
                failed_paths.append(relative_path)

    # Summary
    stats = {
        "Total files": len(gd_files),
        "Successfully formatted": formatted_files,
        "Failed": failed_files
    }

    success = failed_files == 0

    if yaml_output:
        output_yaml_results("format", {**stats, "failed_paths": failed_paths}, success)
    else:
        print_summary("Formatting Summary", stats, silent)
        if not silent:
            print()
            if not success:
                msg = "⚠️ WARNING: Some files failed to format"
                colored_msg = Colors.colorize(msg, Colors.YELLOW + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All GDScript files formatted successfully!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show failed files
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
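
Both async runners cap concurrency with asyncio.Semaphore(min(10, len(gd_files))). If that cap ever needs tuning, one possible variation (hypothetical, not in this commit) is a small helper that takes the limit as a parameter and never yields a zero-permit semaphore:

# Hypothetical variation (not in the commit): build the semaphore from an
# explicit concurrency limit instead of the hard-coded 10.
import asyncio


def make_semaphore(task_count: int, max_concurrency: int = 10) -> asyncio.Semaphore:
    # Keep at least one permit so an empty batch never produces Semaphore(0).
    return asyncio.Semaphore(max(1, min(max_concurrency, task_count)))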


def run_validate(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run validation on YAML, TOML, and JSON files."""
    if not silent and not yaml_output:
@@ -562,6 +916,134 @@ def run_validate(project_root: Path, silent: bool = False, yaml_output: bool = F

    return success, {**stats, "failed_paths": failed_paths}


async def process_validation_file_async(file_path: Path, file_type: str, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str]:
    """Process a single file for validation asynchronously."""
    async with semaphore:
        relative_path = file_path.relative_to(project_root)
        if not silent and not yaml_output:
            file_msg = f"📄 Validating: {relative_path}"
            colored_file = Colors.colorize(file_msg, Colors.CYAN)
            print(colored_file)

        # Validation functions mapping
        validators = {
            'yaml': validate_yaml_file_async,
            'toml': validate_toml_file_async,
            'json': validate_json_file_async
        }

        validator = validators[file_type]
        is_valid, error_msg = await validator(file_path)

        if is_valid:
            if not yaml_output:
                print_result(True, "", silent)
            return True, str(relative_path)
        else:
            if not yaml_output:
                print_result(False, error_msg, silent)
            return False, str(relative_path)


async def run_validate_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run validation on YAML, TOML, and JSON files asynchronously."""
    if not silent and not yaml_output:
        print_header("📋 File Format Validation (Async)")

    # Get all validation files
    validation_files = get_validation_files(project_root)
    total_files = sum(len(files) for files in validation_files.values())

    if total_files == 0:
        if not silent:
            msg = "No YAML, TOML, or JSON files found to validate."
            colored_msg = Colors.colorize(msg, Colors.YELLOW)
            print(colored_msg)
        return True, {"Total files": 0, "Valid files": 0, "Invalid files": 0}

    if not silent and not yaml_output:
        count_msg = f"Found {total_files} files to validate:"
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(colored_count)

        for file_type, files in validation_files.items():
            if files:
                type_msg = f" {file_type.upper()}: {len(files)} files"
                colored_type = Colors.colorize(type_msg, Colors.CYAN)
                print(colored_type)

        print()

    valid_files = invalid_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, total_files))

    # Prepare all validation tasks
    all_tasks = []
    for file_type, files in validation_files.items():
        if not files:
            continue

        if not silent and not yaml_output:
            type_header = f"🔍 Validating {file_type.upper()} files"
            colored_header = Colors.colorize(type_header, Colors.MAGENTA + Colors.BOLD)
            print(colored_header)

        for file_path in files:
            all_tasks.append(process_validation_file_async(file_path, file_type, project_root, semaphore, silent, yaml_output))

    # Process files concurrently
    results = await asyncio.gather(*all_tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            invalid_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, relative_path = result
            if success:
                valid_files += 1
            else:
                invalid_files += 1
                failed_paths.append(relative_path)

    if not silent and not yaml_output:
        print()

    # Summary
    stats = {
        "Total files": total_files,
        "Valid files": valid_files,
        "Invalid files": invalid_files
    }

    success = invalid_files == 0

    if yaml_output:
        output_yaml_results("validate", {**stats, "failed_paths": failed_paths}, success)
    else:
        if not silent:
            print_summary("Validation Summary", stats)
            print()
            if not success:
                msg = "❌ File validation FAILED - Please fix the syntax errors above"
                colored_msg = Colors.colorize(msg, Colors.RED + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All files passed validation!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show errors
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
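
A hedged sketch of how the three async passes could be chained into a single build-pipeline entry point; the pipeline() wrapper and its step order are illustrative and not part of the script's actual CLI.

# Illustrative sketch (not part of the commit): run format, lint and
# validation back to back and stop at the first failing step.
import asyncio
from pathlib import Path


async def pipeline(project_root: Path) -> bool:
    for step in (run_format_async, run_lint_async, run_validate_async):
        success, _stats = await step(project_root, silent=True)
        if not success:
            return False
    return True


if __name__ == "__main__":
    raise SystemExit(0 if asyncio.run(pipeline(Path.cwd())) else 1)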


def run_format(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run gdformat on all GDScript files."""
    if not yaml_output: