Add building pipeline
Some checks failed
Continuous Integration / Code Formatting (push) Successful in 29s
Continuous Integration / Code Quality Check (push) Successful in 27s
Continuous Integration / Test Execution (push) Failing after 16s
Continuous Integration / CI Summary (push) Failing after 3s
Continuous Integration / Code Formatting (pull_request) Successful in 26s
Continuous Integration / Code Quality Check (pull_request) Successful in 26s
Continuous Integration / Test Execution (pull_request) Failing after 17s
Continuous Integration / CI Summary (pull_request) Failing after 5s
Some checks failed
Continuous Integration / Code Formatting (push) Successful in 29s
Continuous Integration / Code Quality Check (push) Successful in 27s
Continuous Integration / Test Execution (push) Failing after 16s
Continuous Integration / CI Summary (push) Failing after 3s
Continuous Integration / Code Formatting (pull_request) Successful in 26s
Continuous Integration / Code Quality Check (pull_request) Successful in 26s
Continuous Integration / Test Execution (pull_request) Failing after 17s
Continuous Integration / CI Summary (pull_request) Failing after 5s
This commit is contained in:
403
.gitea/workflows/build.yml
Normal file
403
.gitea/workflows/build.yml
Normal file
@@ -0,0 +1,403 @@
|
||||
name: Build Game

# Build pipeline for creating game executables across multiple platforms
#
# Features:
# - Manual trigger with platform selection
# - Tag-based automatic builds for releases
# - Multi-platform builds (Windows, Linux, macOS, Android)
# - Artifact storage for one week
# - Configurable build options

on:
  # Manual trigger with platform selection
  workflow_dispatch:
    inputs:
      platforms:
        # Free-form string; each build job gates on substring membership
        # via contains() against this value.
        description: 'Platforms to build (comma-separated: windows,linux,macos,android)'
        required: true
        default: 'windows,linux'
        type: string
      build_type:
        # Maps directly onto Godot's --export-release / --export-debug flag.
        description: 'Build type'
        required: true
        default: 'release'
        type: choice
        options:
          - release
          - debug
      version_override:
        # When empty, the prepare job derives a version from the tag or git info.
        description: 'Override version (optional)'
        required: false
        type: string

  # Automatic trigger on git tags (for releases)
  push:
    tags:
      - 'v*' # Version tags (v1.0.0, v2.1.0, etc.)
      - 'release-*' # Release tags

env:
  # Godot version consumed by chickensoft-games/setup-godot in every build job.
  GODOT_VERSION: "4.4-dev2"
  # Artifact-name prefix used by the prepare job.
  PROJECT_NAME: "Skelly"
  # Relative directory every job writes export output into.
  BUILD_DIR: "builds"
|
||||
|
||||
jobs:
  # Preparation job - determines build configuration
  prepare:
    name: Prepare Build
    runs-on: ubuntu-latest
    # Consumed by the build jobs via needs.prepare.outputs.*
    outputs:
      platforms: ${{ steps.config.outputs.platforms }}
      build_type: ${{ steps.config.outputs.build_type }}
      version: ${{ steps.config.outputs.version }}
      artifact_name: ${{ steps.config.outputs.artifact_name }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Full history so git rev-parse works for version generation below.
          fetch-depth: 0

      - name: Configure build parameters
        id: config
        # NOTE(review): workflow_dispatch inputs are interpolated directly into
        # this shell script. That is only safe while triggering is restricted to
        # trusted users — consider passing the inputs through `env:` instead.
        run: |
          # Determine platforms to build
          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            platforms="${{ github.event.inputs.platforms }}"
            build_type="${{ github.event.inputs.build_type }}"
            version_override="${{ github.event.inputs.version_override }}"
          else
            # Tag-triggered build - build all platforms
            platforms="windows,linux,macos,android"
            build_type="release"
            version_override=""
          fi

          # Determine version
          if [[ -n "$version_override" ]]; then
            version="$version_override"
          elif [[ "${{ github.ref_type }}" == "tag" ]]; then
            version="${{ github.ref_name }}"
          else
            # Generate version from git info
            commit_short=$(git rev-parse --short HEAD)
            branch_name="${{ github.ref_name }}"
            timestamp=$(date +%Y%m%d-%H%M)
            version="${branch_name}-${commit_short}-${timestamp}"
          fi

          # Create artifact name
          artifact_name="${{ env.PROJECT_NAME }}-${version}-${build_type}"

          echo "platforms=${platforms}" >> $GITHUB_OUTPUT
          echo "build_type=${build_type}" >> $GITHUB_OUTPUT
          echo "version=${version}" >> $GITHUB_OUTPUT
          echo "artifact_name=${artifact_name}" >> $GITHUB_OUTPUT

          echo "🔧 Build Configuration:"
          echo "  Platforms: ${platforms}"
          echo "  Build Type: ${build_type}"
          echo "  Version: ${version}"
          echo "  Artifact: ${artifact_name}"
|
||||
|
||||
  # Windows build job
  build-windows:
    name: Build Windows
    runs-on: ubuntu-latest
    needs: prepare
    # Substring match against the comma-separated platform list from prepare.
    if: contains(needs.prepare.outputs.platforms, 'windows')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Godot
        uses: chickensoft-games/setup-godot@v1
        with:
          version: ${{ env.GODOT_VERSION }}
          use-dotnet: false

      - name: Create build directory
        run: mkdir -p ${{ env.BUILD_DIR }}

      - name: Import project assets
        run: |
          echo "📦 Importing project assets..."
          # Headless editor pass imports resources; `|| true` tolerates an
          # unclean editor exit, which is common on a fresh import.
          godot --headless --verbose --editor --quit || true
          sleep 2

      - name: Build Windows executable
        run: |
          echo "🏗️ Building Windows executable..."
          # "Windows Desktop" must match an export preset in export_presets.cfg.
          godot --headless --verbose --export-${{ needs.prepare.outputs.build_type }} "Windows Desktop" \
            ${{ env.BUILD_DIR }}/skelly-windows-${{ needs.prepare.outputs.version }}.exe

          # Verify build output
          if [[ -f "${{ env.BUILD_DIR }}/skelly-windows-${{ needs.prepare.outputs.version }}.exe" ]]; then
            echo "✅ Windows build successful"
            ls -la ${{ env.BUILD_DIR }}/
          else
            echo "❌ Windows build failed"
            exit 1
          fi

      - name: Upload Windows build
        uses: actions/upload-artifact@v4
        with:
          name: ${{ needs.prepare.outputs.artifact_name }}-windows
          path: ${{ env.BUILD_DIR }}/skelly-windows-${{ needs.prepare.outputs.version }}.exe
          retention-days: 7
          compression-level: 6
|
||||
|
||||
  # Linux build job
  build-linux:
    name: Build Linux
    runs-on: ubuntu-latest
    needs: prepare
    # Substring match against the comma-separated platform list from prepare.
    if: contains(needs.prepare.outputs.platforms, 'linux')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Godot
        uses: chickensoft-games/setup-godot@v1
        with:
          version: ${{ env.GODOT_VERSION }}
          use-dotnet: false

      - name: Create build directory
        run: mkdir -p ${{ env.BUILD_DIR }}

      - name: Import project assets
        run: |
          echo "📦 Importing project assets..."
          # Headless editor pass imports resources; `|| true` tolerates an
          # unclean editor exit, which is common on a fresh import.
          godot --headless --verbose --editor --quit || true
          sleep 2

      - name: Build Linux executable
        run: |
          echo "🏗️ Building Linux executable..."
          # "Linux" must match an export preset in export_presets.cfg.
          godot --headless --verbose --export-${{ needs.prepare.outputs.build_type }} "Linux" \
            ${{ env.BUILD_DIR }}/skelly-linux-${{ needs.prepare.outputs.version }}.x86_64

          # Make executable
          chmod +x ${{ env.BUILD_DIR }}/skelly-linux-${{ needs.prepare.outputs.version }}.x86_64

          # Verify build output
          if [[ -f "${{ env.BUILD_DIR }}/skelly-linux-${{ needs.prepare.outputs.version }}.x86_64" ]]; then
            echo "✅ Linux build successful"
            ls -la ${{ env.BUILD_DIR }}/
          else
            echo "❌ Linux build failed"
            exit 1
          fi

      - name: Upload Linux build
        uses: actions/upload-artifact@v4
        with:
          name: ${{ needs.prepare.outputs.artifact_name }}-linux
          path: ${{ env.BUILD_DIR }}/skelly-linux-${{ needs.prepare.outputs.version }}.x86_64
          retention-days: 7
          compression-level: 6
|
||||
|
||||
  # macOS build job
  # NOTE(review): cross-exported from a Linux runner, so the resulting app is
  # unsigned and un-notarized — verify Gatekeeper expectations before shipping.
  build-macos:
    name: Build macOS
    runs-on: ubuntu-latest
    needs: prepare
    # Substring match against the comma-separated platform list from prepare.
    if: contains(needs.prepare.outputs.platforms, 'macos')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Godot
        uses: chickensoft-games/setup-godot@v1
        with:
          version: ${{ env.GODOT_VERSION }}
          use-dotnet: false

      - name: Create build directory
        run: mkdir -p ${{ env.BUILD_DIR }}

      - name: Import project assets
        run: |
          echo "📦 Importing project assets..."
          # Headless editor pass imports resources; `|| true` tolerates an
          # unclean editor exit, which is common on a fresh import.
          godot --headless --verbose --editor --quit || true
          sleep 2

      - name: Build macOS application
        run: |
          echo "🏗️ Building macOS application..."
          # "macOS" must match an export preset; the preset emits a zipped .app.
          godot --headless --verbose --export-${{ needs.prepare.outputs.build_type }} "macOS" \
            ${{ env.BUILD_DIR }}/skelly-macos-${{ needs.prepare.outputs.version }}.zip

          # Verify build output
          if [[ -f "${{ env.BUILD_DIR }}/skelly-macos-${{ needs.prepare.outputs.version }}.zip" ]]; then
            echo "✅ macOS build successful"
            ls -la ${{ env.BUILD_DIR }}/
          else
            echo "❌ macOS build failed"
            exit 1
          fi

      - name: Upload macOS build
        uses: actions/upload-artifact@v4
        with:
          name: ${{ needs.prepare.outputs.artifact_name }}-macos
          path: ${{ env.BUILD_DIR }}/skelly-macos-${{ needs.prepare.outputs.version }}.zip
          retention-days: 7
          compression-level: 6
|
||||
|
||||
  # Android build job
  build-android:
    name: Build Android
    runs-on: ubuntu-latest
    needs: prepare
    # Substring match against the comma-separated platform list from prepare.
    if: contains(needs.prepare.outputs.platforms, 'android')

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Java
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          # JDK 17 is required by the Android Gradle toolchain Godot invokes.
          java-version: '17'

      - name: Setup Android SDK
        uses: android-actions/setup-android@v3
        with:
          api-level: 33
          build-tools: 33.0.0

      - name: Setup Godot
        uses: chickensoft-games/setup-godot@v1
        with:
          version: ${{ env.GODOT_VERSION }}
          use-dotnet: false

      - name: Setup Android export templates
        run: |
          echo "📱 Setting up Android export templates..."
          # Download Android export templates
          mkdir -p ~/.local/share/godot/export_templates/${{ env.GODOT_VERSION }}
          # Templates will be automatically downloaded by Godot during export

      - name: Create build directory
        run: mkdir -p ${{ env.BUILD_DIR }}

      - name: Import project assets
        run: |
          echo "📦 Importing project assets..."
          # Headless editor pass imports resources; `|| true` tolerates an
          # unclean editor exit, which is common on a fresh import.
          godot --headless --verbose --editor --quit || true
          sleep 2

      - name: Build Android APK
        run: |
          echo "🏗️ Building Android APK..."

          # Set ANDROID_HOME if not already set
          export ANDROID_HOME=${ANDROID_HOME:-$ANDROID_SDK_ROOT}

          # "Android" must match an export preset in export_presets.cfg.
          godot --headless --verbose --export-${{ needs.prepare.outputs.build_type }} "Android" \
            ${{ env.BUILD_DIR }}/skelly-android-${{ needs.prepare.outputs.version }}.apk

          # Verify build output
          if [[ -f "${{ env.BUILD_DIR }}/skelly-android-${{ needs.prepare.outputs.version }}.apk" ]]; then
            echo "✅ Android build successful"
            ls -la ${{ env.BUILD_DIR }}/

            # Show APK info
            echo "📱 APK Information:"
            file ${{ env.BUILD_DIR }}/skelly-android-${{ needs.prepare.outputs.version }}.apk
          else
            echo "❌ Android build failed"
            exit 1
          fi

      - name: Upload Android build
        uses: actions/upload-artifact@v4
        with:
          name: ${{ needs.prepare.outputs.artifact_name }}-android
          path: ${{ env.BUILD_DIR }}/skelly-android-${{ needs.prepare.outputs.version }}.apk
          retention-days: 7
          compression-level: 6
|
||||
|
||||
  # Summary job - creates release summary
  summary:
    name: Build Summary
    runs-on: ubuntu-latest
    needs: [prepare, build-windows, build-linux, build-macos, build-android]
    # Run even when upstream builds failed or were skipped, so a report and a
    # final pass/fail verdict are always produced.
    if: always()

    steps:
      - name: Generate build summary
        run: |
          echo "🎮 Build Summary for ${{ needs.prepare.outputs.artifact_name }}"
          echo "=================================="
          echo ""
          echo "📋 Configuration:"
          echo "  Version: ${{ needs.prepare.outputs.version }}"
          echo "  Build Type: ${{ needs.prepare.outputs.build_type }}"
          echo "  Platforms: ${{ needs.prepare.outputs.platforms }}"
          echo ""
          echo "📊 Build Results:"

          platforms="${{ needs.prepare.outputs.platforms }}"

          # Only report platforms that were actually requested; skipped jobs
          # report "skipped" via needs.<job>.result.
          if [[ "$platforms" == *"windows"* ]]; then
            windows_status="${{ needs.build-windows.result }}"
            echo "  🪟 Windows: $windows_status"
          fi

          if [[ "$platforms" == *"linux"* ]]; then
            linux_status="${{ needs.build-linux.result }}"
            echo "  🐧 Linux: $linux_status"
          fi

          if [[ "$platforms" == *"macos"* ]]; then
            macos_status="${{ needs.build-macos.result }}"
            echo "  🍎 macOS: $macos_status"
          fi

          if [[ "$platforms" == *"android"* ]]; then
            android_status="${{ needs.build-android.result }}"
            echo "  🤖 Android: $android_status"
          fi

          echo ""
          echo "📦 Artifacts are available for 7 days"
          echo "🔗 Download from: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"

      - name: Check overall build status
        run: |
          # Check if any required builds failed
          platforms="${{ needs.prepare.outputs.platforms }}"
          failed_builds=()

          # A requested platform whose job result is anything but "success"
          # (failure, cancelled, skipped) counts as failed.
          if [[ "$platforms" == *"windows"* ]] && [[ "${{ needs.build-windows.result }}" != "success" ]]; then
            failed_builds+=("Windows")
          fi

          if [[ "$platforms" == *"linux"* ]] && [[ "${{ needs.build-linux.result }}" != "success" ]]; then
            failed_builds+=("Linux")
          fi

          if [[ "$platforms" == *"macos"* ]] && [[ "${{ needs.build-macos.result }}" != "success" ]]; then
            failed_builds+=("macOS")
          fi

          if [[ "$platforms" == *"android"* ]] && [[ "${{ needs.build-android.result }}" != "success" ]]; then
            failed_builds+=("Android")
          fi

          if [[ ${#failed_builds[@]} -gt 0 ]]; then
            echo "❌ Build failed for: ${failed_builds[*]}"
            exit 1
          else
            echo "✅ All builds completed successfully!"
          fi
|
||||
@@ -25,6 +25,7 @@ NOTE: Handles "successful but noisy" linter output such as
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
@@ -35,6 +36,11 @@ import warnings
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
try:
|
||||
import aiofiles
|
||||
except ImportError:
|
||||
aiofiles = None
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
@@ -181,6 +187,71 @@ def run_command(cmd: List[str], cwd: Path, timeout: int = 30) -> subprocess.Comp
|
||||
return result
|
||||
|
||||
|
||||
async def run_command_async(cmd: List[str], cwd: Path, timeout: int = 30) -> Tuple[int, str, str]:
    """Execute a shell command asynchronously with error handling and output filtering.

    Filters out gdtoolkit's pkg_resources deprecation warnings.

    Args:
        cmd: Command and arguments to execute
        cwd: Working directory for command execution
        timeout: Maximum execution time in seconds (default: 30s)

    Returns:
        Tuple of (returncode, stdout, stderr)

    Raises:
        subprocess.TimeoutExpired: if the command runs longer than *timeout*.
    """

    def _scrub(text: str) -> str:
        """Drop the pkg_resources deprecation banner and its paired import line."""
        if not text:
            return text
        kept = []
        line_iter = iter(text.split('\n'))
        for line in line_iter:
            if 'pkg_resources is deprecated' in line:
                # The banner is followed by an "import pkg_resources" line; drop both.
                next(line_iter, None)
                continue
            if 'import pkg_resources' in line:
                continue
            kept.append(line)
        return '\n'.join(kept)

    # Child processes inherit our environment minus the pkg_resources warning noise.
    child_env = os.environ.copy()
    child_env['PYTHONWARNINGS'] = 'ignore::UserWarning:pkg_resources'

    try:
        proc = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            cwd=cwd,
            env=child_env,
        )
        raw_out, raw_err = await asyncio.wait_for(proc.communicate(), timeout=timeout)
    except asyncio.TimeoutError:
        # The child is still running; terminate it before surfacing the
        # familiar synchronous-style TimeoutExpired to callers.
        if proc.returncode is None:
            proc.kill()
            await proc.wait()
        raise subprocess.TimeoutExpired(cmd, timeout)

    return (
        proc.returncode,
        _scrub(raw_out.decode('utf-8', errors='replace')),
        _scrub(raw_err.decode('utf-8', errors='replace')),
    )
|
||||
|
||||
|
||||
def should_skip_file(file_path: Path) -> bool:
    """Return True for files the lint/format passes must leave untouched.

    NOTE(review): only TestHelper.gd is excluded — presumably because it is a
    deliberately non-conforming test fixture; confirm with the test suite.
    """
    skipped_names = {"TestHelper.gd"}
    return file_path.name in skipped_names
|
||||
@@ -433,6 +504,25 @@ def validate_yaml_file(file_path: Path) -> Tuple[bool, str]:
|
||||
return False, f"Error reading file: {e}"
|
||||
|
||||
|
||||
async def validate_yaml_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a YAML file asynchronously.

    Degrades to the synchronous validator when aiofiles is unavailable.

    Returns:
        Tuple of (is_valid, error message) — the message is "" on success.
    """
    if yaml is None:
        return False, "PyYAML not installed. Install with: pip install PyYAML"

    if aiofiles is None:
        # No async file I/O available; fall back to the blocking code path.
        return validate_yaml_file(file_path)

    try:
        async with aiofiles.open(file_path, 'r', encoding='utf-8') as handle:
            text = await handle.read()
        yaml.safe_load(text)
        return True, ""
    except yaml.YAMLError as exc:
        return False, f"YAML syntax error: {exc}"
    except Exception as exc:
        return False, f"Error reading file: {exc}"
|
||||
|
||||
|
||||
def validate_toml_file(file_path: Path) -> Tuple[bool, str]:
|
||||
"""Validate a TOML file."""
|
||||
if tomllib is None:
|
||||
@@ -448,6 +538,25 @@ def validate_toml_file(file_path: Path) -> Tuple[bool, str]:
|
||||
return False, f"Error reading file: {e}"
|
||||
|
||||
|
||||
async def validate_toml_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a TOML file asynchronously.

    Degrades to the synchronous validator when aiofiles is unavailable.

    Returns:
        Tuple of (is_valid, error message) — the message is "" on success.
    """
    if tomllib is None:
        return False, "tomllib/tomli not available. For Python 3.11+, it's built-in. For older versions: pip install tomli"

    if aiofiles is None:
        # No async file I/O available; fall back to the blocking code path.
        return validate_toml_file(file_path)

    try:
        # tomllib.loads wants str, so read binary and decode explicitly.
        async with aiofiles.open(file_path, 'rb') as handle:
            raw = await handle.read()
        tomllib.loads(raw.decode('utf-8'))
        return True, ""
    except tomllib.TOMLDecodeError as exc:
        return False, f"TOML syntax error: {exc}"
    except Exception as exc:
        return False, f"Error reading file: {exc}"
|
||||
|
||||
|
||||
def validate_json_file(file_path: Path) -> Tuple[bool, str]:
|
||||
"""Validate a JSON file."""
|
||||
try:
|
||||
@@ -460,6 +569,251 @@ def validate_json_file(file_path: Path) -> Tuple[bool, str]:
|
||||
return False, f"Error reading file: {e}"
|
||||
|
||||
|
||||
async def validate_json_file_async(file_path: Path) -> Tuple[bool, str]:
    """Validate a JSON file asynchronously.

    Degrades to the synchronous validator when aiofiles is unavailable.

    Returns:
        Tuple of (is_valid, error message) — the message is "" on success.
    """
    if aiofiles is None:
        # No async file I/O available; fall back to the blocking code path.
        return validate_json_file(file_path)

    try:
        async with aiofiles.open(file_path, 'r', encoding='utf-8') as handle:
            text = await handle.read()
        json.loads(text)
        return True, ""
    except json.JSONDecodeError as exc:
        return False, f"JSON syntax error: {exc}"
    except Exception as exc:
        return False, f"Error reading file: {exc}"
|
||||
|
||||
|
||||
async def process_lint_file_async(gd_file: Path, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str, str]:
    """Process a single file for linting asynchronously.

    Runs ``gdlint`` on *gd_file* under the concurrency limit imposed by
    *semaphore*.

    Args:
        gd_file: GDScript file to lint.
        project_root: Used as the subprocess cwd and to compute the
            relative path reported back to the caller.
        semaphore: Caps the number of concurrent lint subprocesses.
        silent: Suppress per-file progress output.
        yaml_output: Suppress all human-readable output (machine mode).

    Returns:
        Tuple of (success, linter output, path relative to project_root):
        (True, "") for a clean file, (True, output) for warnings only,
        (False, output/error) for lint errors or tool failures.
    """
    async with semaphore:
        relative_path = gd_file.relative_to(project_root)
        if not silent and not yaml_output:
            file_msg = f"📄 Linting: {relative_path.name}"
            colored_file = Colors.colorize(file_msg, Colors.CYAN)
            print(colored_file)

        # Deliberately non-conforming files are skipped but reported as clean.
        if should_skip_file(gd_file):
            if not silent and not yaml_output:
                print_skip_message("gdlint")
            return True, "", str(relative_path)

        try:
            returncode, stdout, stderr = await run_command_async(["gdlint", str(gd_file)], project_root)
            output = (stdout + stderr).strip()

            if returncode == 0:
                # If output is "no problems" (or similar), treat as clean.
                if _is_successful_linter_output(output):
                    if not yaml_output:
                        print_result(True, "", silent)
                    return True, "", str(relative_path)
                else:
                    # Exit code 0 but noisy output: pass with warnings.
                    if not yaml_output:
                        print_result(True, output, silent)
                    return True, output, str(relative_path)
            else:
                if not yaml_output:
                    print_result(False, output, silent)
                return False, output, str(relative_path)

        except FileNotFoundError:
            # gdtoolkit is not installed / not on PATH.
            if not silent and not yaml_output:
                print("  ❌ ERROR: gdlint not found")
            return False, "gdlint not found", str(relative_path)
        except Exception as e:
            if not silent and not yaml_output:
                print(f"  ❌ ERROR: {e}")
            return False, str(e), str(relative_path)
        finally:
            # Blank separator line between files; runs even on early return.
            if not silent and not yaml_output:
                print()
|
||||
|
||||
|
||||
async def run_lint_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run gdlint on all GDScript files asynchronously.

    Files are linted concurrently (at most 10 at a time), so per-file
    progress output interleaves in completion order.

    Args:
        project_root: Repository root to scan for .gd files.
        silent: Suppress progress output (failed files are still printed).
        yaml_output: Emit machine-readable YAML results instead of prose.

    Returns:
        Tuple of (success, stats dict including "failed_paths"). success is
        True only when no file produced lint errors — warnings still pass.
    """
    if not yaml_output:
        print_header("🔍 GDScript Linter (Async)", silent)

    gd_files = get_gd_files(project_root)
    if not silent and not yaml_output:
        count_msg = f"Found {len(gd_files)} GDScript files to lint."
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(f"{colored_count}\n")

    clean_files = warning_files = error_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, len(gd_files)))

    # Process files concurrently; return_exceptions=True keeps one crashed
    # task from cancelling the rest of the gather.
    tasks = [process_lint_file_async(gd_file, project_root, semaphore, silent, yaml_output) for gd_file in gd_files]
    results = await asyncio.gather(*tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            # The task itself raised, so the file identity is lost here.
            error_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, output, relative_path = result
            if success:
                # Success with non-empty output means warnings were reported.
                if not output:
                    clean_files += 1
                else:
                    warning_files += 1
            else:
                error_files += 1
                failed_paths.append(relative_path)

    # Summary
    stats = {
        "Total files": len(gd_files),
        "Clean files": clean_files,
        "Files with warnings": warning_files,
        "Files with errors": error_files
    }

    success = error_files == 0

    if yaml_output:
        output_yaml_results("lint", {**stats, "failed_paths": failed_paths}, success)
    else:
        print_summary("Linting Summary", stats, silent)
        if not silent:
            print()
            if not success:
                msg = "❌ Linting FAILED - Please fix the errors above"
                colored_msg = Colors.colorize(msg, Colors.RED + Colors.BOLD)
                print(colored_msg)
            elif warning_files > 0:
                msg = "⚠️ Linting PASSED with warnings - Consider fixing them"
                colored_msg = Colors.colorize(msg, Colors.YELLOW + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All GDScript files passed linting!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show failed files
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
|
||||
|
||||
|
||||
async def process_format_file_async(gd_file: Path, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str]:
    """Process a single file for formatting asynchronously.

    Runs ``gdformat`` (which rewrites the file in place) under the
    concurrency limit imposed by *semaphore*.

    Args:
        gd_file: GDScript file to format.
        project_root: Subprocess cwd; also used for the reported relative path.
        semaphore: Caps the number of concurrent format subprocesses.
        silent: Suppress per-file progress output.
        yaml_output: Suppress all human-readable output (machine mode).

    Returns:
        Tuple of (success, path relative to project_root).
    """
    async with semaphore:
        relative_path = gd_file.relative_to(project_root)
        if not silent and not yaml_output:
            file_msg = f"🎯 Formatting: {relative_path.name}"
            colored_file = Colors.colorize(file_msg, Colors.CYAN)
            print(colored_file)

        # Deliberately non-conforming files are skipped but reported as success.
        if should_skip_file(gd_file):
            if not silent and not yaml_output:
                print_skip_message("gdformat")
            return True, str(relative_path)

        try:
            returncode, stdout, stderr = await run_command_async(["gdformat", str(gd_file)], project_root)

            if returncode == 0:
                if not silent and not yaml_output:
                    success_msg = "✅ Success"
                    colored_success = Colors.colorize(success_msg, Colors.GREEN)
                    print(f"  {colored_success}")
                return True, str(relative_path)
            else:
                if not silent and not yaml_output:
                    fail_msg = f"❌ FAILED: {relative_path}"
                    colored_fail = Colors.colorize(fail_msg, Colors.RED)
                    print(f"  {colored_fail}")
                    output = (stdout + stderr).strip()
                    if output:
                        colored_output = Colors.colorize(output, Colors.RED)
                        print(f"  {colored_output}")
                return False, str(relative_path)

        except FileNotFoundError:
            # gdtoolkit is not installed / not on PATH.
            if not silent and not yaml_output:
                print("  ❌ ERROR: gdformat not found")
            return False, str(relative_path)
        except Exception as e:
            if not silent and not yaml_output:
                print(f"  ❌ ERROR: {e}")
            return False, str(relative_path)
        finally:
            # Blank separator line between files; runs even on early return.
            if not silent and not yaml_output:
                print()
|
||||
|
||||
|
||||
async def run_format_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run gdformat on all GDScript files asynchronously.

    Files are formatted concurrently (at most 10 at a time), so per-file
    progress output interleaves in completion order.

    Args:
        project_root: Repository root to scan for .gd files.
        silent: Suppress progress output (failed files are still printed).
        yaml_output: Emit machine-readable YAML results instead of prose.

    Returns:
        Tuple of (success, stats dict including "failed_paths"). success is
        True only when every file was formatted without error.
    """
    if not yaml_output:
        print_header("🎨 GDScript Formatter (Async)", silent)

    gd_files = get_gd_files(project_root)
    if not silent and not yaml_output:
        count_msg = f"Found {len(gd_files)} GDScript files to format."
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(f"{colored_count}\n")

    formatted_files = failed_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, len(gd_files)))

    # Process files concurrently; return_exceptions=True keeps one crashed
    # task from cancelling the rest of the gather.
    tasks = [process_format_file_async(gd_file, project_root, semaphore, silent, yaml_output) for gd_file in gd_files]
    results = await asyncio.gather(*tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            # The task itself raised, so the file identity is lost here.
            failed_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, relative_path = result
            if success:
                formatted_files += 1
            else:
                failed_files += 1
                failed_paths.append(relative_path)

    # Summary
    stats = {
        "Total files": len(gd_files),
        "Successfully formatted": formatted_files,
        "Failed": failed_files
    }

    success = failed_files == 0

    if yaml_output:
        output_yaml_results("format", {**stats, "failed_paths": failed_paths}, success)
    else:
        print_summary("Formatting Summary", stats, silent)
        if not silent:
            print()
            if not success:
                msg = "⚠️ WARNING: Some files failed to format"
                colored_msg = Colors.colorize(msg, Colors.YELLOW + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All GDScript files formatted successfully!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show failed files
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
|
||||
|
||||
|
||||
def run_validate(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
|
||||
"""Run validation on YAML, TOML, and JSON files."""
|
||||
if not silent and not yaml_output:
|
||||
@@ -562,6 +916,134 @@ def run_validate(project_root: Path, silent: bool = False, yaml_output: bool = F
|
||||
|
||||
return success, {**stats, "failed_paths": failed_paths}
|
||||
|
||||
|
||||
async def process_validation_file_async(file_path: Path, file_type: str, project_root: Path, semaphore: asyncio.Semaphore, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, str]:
    """Validate one YAML/TOML/JSON file under the shared concurrency limit.

    Args:
        file_path: File to validate.
        file_type: One of 'yaml', 'toml', 'json' — selects the validator.
        project_root: Used to compute the relative path reported back.
        semaphore: Caps the number of concurrently processed files.
        silent: Suppress per-file progress output.
        yaml_output: Suppress all human-readable output (machine mode).

    Returns:
        Tuple of (is_valid, path relative to project_root).
    """
    async with semaphore:
        rel_path = file_path.relative_to(project_root)
        if not silent and not yaml_output:
            print(Colors.colorize(f"📄 Validating: {rel_path}", Colors.CYAN))

        # Dispatch table keyed by file type.
        dispatch = {
            'yaml': validate_yaml_file_async,
            'toml': validate_toml_file_async,
            'json': validate_json_file_async,
        }
        is_valid, error_msg = await dispatch[file_type](file_path)

        if not yaml_output:
            print_result(is_valid, "" if is_valid else error_msg, silent)
        return is_valid, str(rel_path)
|
||||
|
||||
|
||||
async def run_validate_async(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
    """Run validation on YAML, TOML, and JSON files asynchronously.

    Args:
        project_root: Repository root to scan for files to validate.
        silent: Suppress progress output (failures are still printed).
        yaml_output: Emit machine-readable YAML results instead of prose.

    Returns:
        Tuple of (success, stats dict). The stats dict now always carries a
        "failed_paths" list, so callers get a uniform schema whether or not
        any files were found.
    """
    if not silent and not yaml_output:
        print_header("📋 File Format Validation (Async)")

    # Get all validation files
    validation_files = get_validation_files(project_root)
    total_files = sum(len(files) for files in validation_files.values())

    if total_files == 0:
        # BUGFIX: guard on yaml_output too so machine-readable output is not
        # polluted by prose, and emit a YAML record so consumers of machine
        # mode always receive a result.
        if not silent and not yaml_output:
            msg = "No YAML, TOML, or JSON files found to validate."
            colored_msg = Colors.colorize(msg, Colors.YELLOW)
            print(colored_msg)
        stats = {"Total files": 0, "Valid files": 0, "Invalid files": 0}
        if yaml_output:
            output_yaml_results("validate", {**stats, "failed_paths": []}, True)
        # BUGFIX: include "failed_paths" so the return schema matches the
        # non-empty case below.
        return True, {**stats, "failed_paths": []}

    if not silent and not yaml_output:
        count_msg = f"Found {total_files} files to validate:"
        colored_count = Colors.colorize(count_msg, Colors.BLUE)
        print(colored_count)

        for file_type, files in validation_files.items():
            if files:
                type_msg = f"  {file_type.upper()}: {len(files)} files"
                colored_type = Colors.colorize(type_msg, Colors.CYAN)
                print(colored_type)

        print()

    valid_files = invalid_files = 0
    failed_paths = []

    # Use semaphore to limit concurrent operations
    semaphore = asyncio.Semaphore(min(10, total_files))

    # Prepare all validation tasks
    all_tasks = []
    for file_type, files in validation_files.items():
        if not files:
            continue

        if not silent and not yaml_output:
            type_header = f"🔍 Validating {file_type.upper()} files"
            colored_header = Colors.colorize(type_header, Colors.MAGENTA + Colors.BOLD)
            print(colored_header)

        for file_path in files:
            all_tasks.append(process_validation_file_async(file_path, file_type, project_root, semaphore, silent, yaml_output))

    # Process files concurrently; return_exceptions=True keeps one crashed
    # task from cancelling the rest of the gather.
    results = await asyncio.gather(*all_tasks, return_exceptions=True)

    for result in results:
        if isinstance(result, Exception):
            # The task itself raised, so the file identity is lost here.
            invalid_files += 1
            failed_paths.append("Unknown file - exception occurred")
        else:
            success, relative_path = result
            if success:
                valid_files += 1
            else:
                invalid_files += 1
                failed_paths.append(relative_path)

    if not silent and not yaml_output:
        print()

    # Summary
    stats = {
        "Total files": total_files,
        "Valid files": valid_files,
        "Invalid files": invalid_files
    }

    success = invalid_files == 0

    if yaml_output:
        output_yaml_results("validate", {**stats, "failed_paths": failed_paths}, success)
    else:
        if not silent:
            print_summary("Validation Summary", stats)
            print()
            if not success:
                msg = "❌ File validation FAILED - Please fix the syntax errors above"
                colored_msg = Colors.colorize(msg, Colors.RED + Colors.BOLD)
                print(colored_msg)
            else:
                msg = "✅ All files passed validation!"
                colored_msg = Colors.colorize(msg, Colors.GREEN + Colors.BOLD)
                print(colored_msg)
        elif not success:
            # In silent mode, still show errors
            for failed_path in failed_paths:
                print(f"❌ {failed_path}")

    return success, {**stats, "failed_paths": failed_paths}
|
||||
|
||||
|
||||
def run_format(project_root: Path, silent: bool = False, yaml_output: bool = False) -> Tuple[bool, Dict]:
|
||||
"""Run gdformat on all GDScript files."""
|
||||
if not yaml_output:
|
||||
|
||||
Reference in New Issue
Block a user