<?php
/**
 * Descomplicar® Crescimento Digital
 * https://descomplicar.pt
 */

/**
 * Comprehensive Test Runner for the Desk-Moloni Synchronization System
 *
 * Executes all test suites and generates detailed reports.
 * Tests bidirectional sync, queue processing, error handling, and performance.
 *
 * @package DeskMoloni\Tests
 * @author Descomplicar® - PHP Fullstack Engineer
 * @version 1.0.0
 */
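
// Typical CLI invocation (the script name below is illustrative; use this file's actual name):
//   php run_all_tests.php
// The runner exits with code 0 when no test fails and 1 otherwise, so it can be used
// directly as a CI gate.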

// Prevent execution in production
if (defined('ENVIRONMENT') && ENVIRONMENT === 'production') {
    exit('Tests cannot be run in production environment');
}

// Error reporting and resource limits for comprehensive testing
error_reporting(E_ALL);
ini_set('display_errors', 1);
ini_set('memory_limit', '512M');
set_time_limit(300); // 5 minutes

// Test configuration
$test_config = [
    'run_unit_tests' => true,
    'run_integration_tests' => true,
    'run_performance_tests' => true,
    'generate_coverage' => false, // Set to true if Xdebug is available
    'output_format' => 'html', // html, json, text
    'detailed_output' => true
];
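
// Optional, illustrative override (an assumption, not part of the original runner):
// allow the report format to be chosen from the command line, e.g. `php run_all_tests.php json`.
if (PHP_SAPI === 'cli' && !empty($argv[1]) && in_array($argv[1], ['html', 'json', 'text'], true)) {
    $test_config['output_format'] = $argv[1];
}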

// Test results storage
$test_results = [
    'start_time' => microtime(true),
    'total_tests' => 0,
    'passed_tests' => 0,
    'failed_tests' => 0,
    'skipped_tests' => 0,
    'test_suites' => [],
    'errors' => [],
    'warnings' => [],
    'performance_metrics' => [],
    'memory_usage' => []
];

/**
 * Execute a test class and capture results
 */
function run_test_class($class_name, $test_file) {
    global $test_results;

    $suite_start = microtime(true);
    $suite_results = [
        'class' => $class_name,
        'file' => $test_file,
        'tests' => [],
        'passed' => 0,
        'failed' => 0,
        'skipped' => 0,
        'execution_time' => 0,
        'memory_used' => 0,
        'errors' => []
    ];

    try {
        // Load test file
        if (!file_exists($test_file)) {
            throw new Exception("Test file not found: {$test_file}");
        }

        require_once $test_file;

        if (!class_exists($class_name)) {
            throw new Exception("Test class not found: {$class_name}");
        }

        // Create test instance
        $test_instance = new $class_name();
        $reflection = new ReflectionClass($class_name);

        // Get all test methods
        $methods = $reflection->getMethods(ReflectionMethod::IS_PUBLIC);
        $test_methods = array_filter($methods, function($method) {
            return strpos($method->getName(), 'test') === 0;
        });

        echo "Running {$class_name} (" . count($test_methods) . " tests)...\n";

        foreach ($test_methods as $method) {
            $test_name = $method->getName();
            $test_start = microtime(true);
            $test_memory_start = memory_get_usage(true);

            try {
                // Setup
                if (method_exists($test_instance, 'setUp')) {
                    $test_instance->setUp();
                }

                // Execute test
                $method->invoke($test_instance);

                // Test passed
                $suite_results['passed']++;
                $test_results['passed_tests']++;
                $status = 'PASSED';
                $error = null;

            } catch (Exception $e) {
                // Test failed
                $suite_results['failed']++;
                $test_results['failed_tests']++;
                $status = 'FAILED';
                $error = $e->getMessage();
                $suite_results['errors'][] = [
                    'test' => $test_name,
                    'error' => $error,
                    'trace' => $e->getTraceAsString()
                ];

            } finally {
                // Teardown
                if (method_exists($test_instance, 'tearDown')) {
                    try {
                        $test_instance->tearDown();
                    } catch (Exception $e) {
                        // Teardown error
                        $test_results['warnings'][] = "Teardown error in {$test_name}: " . $e->getMessage();
                    }
                }
            }

            $test_execution_time = microtime(true) - $test_start;
            $test_memory_used = memory_get_usage(true) - $test_memory_start;

            $suite_results['tests'][] = [
                'name' => $test_name,
                'status' => $status,
                'execution_time' => $test_execution_time,
                'memory_used' => $test_memory_used,
                'error' => $error
            ];

            $test_results['total_tests']++;

            // Progress output
            echo " {$test_name}: {$status}";
            if ($test_execution_time > 1.0) {
                echo " (slow: " . number_format($test_execution_time, 2) . "s)";
            }
            echo "\n";
        }

    } catch (Exception $e) {
        $suite_results['errors'][] = [
            'test' => 'Suite Setup',
            'error' => $e->getMessage(),
            'trace' => $e->getTraceAsString()
        ];
        $test_results['errors'][] = "Error in {$class_name}: " . $e->getMessage();
    }

    $suite_results['execution_time'] = microtime(true) - $suite_start;
    $suite_results['memory_used'] = memory_get_peak_usage(true);
    $test_results['test_suites'][] = $suite_results;

    echo " Completed in " . number_format($suite_results['execution_time'], 2) . "s\n\n";
}
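
// For reference, a minimal sketch of the test-class contract run_test_class() expects
// (a hypothetical example, not one of the real suites): public methods whose names start
// with "test" are executed, optional setUp()/tearDown() run around each test, and a test
// fails by throwing an exception.
//
//     class ExampleSyncTest
//     {
//         public function setUp() {}        // prepare fixtures
//
//         public function testQueueEntryIsCreated()
//         {
//             $created = true; // ... exercise the code under test ...
//             if (!$created) {
//                 throw new Exception('Queue entry was not created');
//             }
//         }
//
//         public function tearDown() {}     // clean up fixtures
//     }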

/**
 * Generate test report
 */
function generate_test_report($format = 'html') {
    global $test_results;

    $test_results['end_time'] = microtime(true);
    $test_results['total_execution_time'] = $test_results['end_time'] - $test_results['start_time'];
    $test_results['peak_memory'] = memory_get_peak_usage(true);
    $test_results['success_rate'] = $test_results['total_tests'] > 0 ?
        ($test_results['passed_tests'] / $test_results['total_tests']) * 100 : 0;

    switch ($format) {
        case 'html':
            return generate_html_report();
        case 'json':
            return json_encode($test_results, JSON_PRETTY_PRINT);
        case 'text':
        default:
            return generate_text_report();
    }
}
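
// Example: generate_test_report('json') returns the full $test_results structure as
// pretty-printed JSON, which can be written to a file as a CI artifact, e.g.:
//   file_put_contents(__DIR__ . '/test_report.json', generate_test_report('json'));
// (the file name here is illustrative).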

/**
 * Generate HTML report
 */
function generate_html_report() {
    global $test_results;

    $html = '<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Desk-Moloni Test Report</title>
    <style>
        body { font-family: Arial, sans-serif; margin: 20px; }
        .header { background: #f8f9fa; padding: 20px; border-radius: 5px; margin-bottom: 20px; }
        .summary { display: flex; gap: 20px; margin-bottom: 20px; }
        .metric { background: #e9ecef; padding: 15px; border-radius: 5px; text-align: center; flex: 1; }
        .metric.passed { background: #d4edda; }
        .metric.failed { background: #f8d7da; }
        .suite { border: 1px solid #dee2e6; margin-bottom: 20px; border-radius: 5px; }
        .suite-header { background: #f8f9fa; padding: 10px; font-weight: bold; }
        .test { padding: 10px; border-bottom: 1px solid #dee2e6; }
        .test.passed { background: #f8fffa; }
        .test.failed { background: #fff8f8; }
        .error { background: #f8d7da; padding: 10px; margin: 10px 0; border-radius: 3px; }
        .performance { background: #fff3cd; padding: 10px; margin: 10px 0; border-radius: 3px; }
    </style>
</head>
<body>';

    $html .= '<div class="header">';
    $html .= '<h1>🐘 Desk-Moloni Synchronization System Test Report</h1>';
    $html .= '<p>Generated on: ' . date('Y-m-d H:i:s') . '</p>';
    $html .= '<p>Environment: ' . (defined('ENVIRONMENT') ? ENVIRONMENT : 'development') . '</p>';
    $html .= '</div>';

    // Summary metrics
    $html .= '<div class="summary">';
    $html .= '<div class="metric">';
    $html .= '<h3>' . $test_results['total_tests'] . '</h3>';
    $html .= '<p>Total Tests</p>';
    $html .= '</div>';
    $html .= '<div class="metric passed">';
    $html .= '<h3>' . $test_results['passed_tests'] . '</h3>';
    $html .= '<p>Passed</p>';
    $html .= '</div>';
    $html .= '<div class="metric failed">';
    $html .= '<h3>' . $test_results['failed_tests'] . '</h3>';
    $html .= '<p>Failed</p>';
    $html .= '</div>';
    $html .= '<div class="metric">';
    $html .= '<h3>' . number_format($test_results['success_rate'], 1) . '%</h3>';
    $html .= '<p>Success Rate</p>';
    $html .= '</div>';
    $html .= '</div>';

    // Execution metrics
    $html .= '<div class="performance">';
    $html .= '<h3>Performance Metrics</h3>';
    $html .= '<p><strong>Total Execution Time:</strong> ' . number_format($test_results['total_execution_time'], 2) . ' seconds</p>';
    $html .= '<p><strong>Peak Memory Usage:</strong> ' . number_format($test_results['peak_memory'] / 1024 / 1024, 2) . ' MB</p>';
    $html .= '</div>';

    // Test suites
    foreach ($test_results['test_suites'] as $suite) {
        $html .= '<div class="suite">';
        $html .= '<div class="suite-header">';
        $html .= $suite['class'] . ' (' . count($suite['tests']) . ' tests, ';
        $html .= $suite['passed'] . ' passed, ' . $suite['failed'] . ' failed)';
        $html .= ' - ' . number_format($suite['execution_time'], 2) . 's';
        $html .= '</div>';

        foreach ($suite['tests'] as $test) {
            $html .= '<div class="test ' . strtolower($test['status']) . '">';
            $html .= '<strong>' . $test['name'] . '</strong> - ' . $test['status'];
            $html .= ' (' . number_format($test['execution_time'], 3) . 's)';
            if ($test['error']) {
                $html .= '<div class="error">' . htmlspecialchars($test['error']) . '</div>';
            }
            $html .= '</div>';
        }

        if (!empty($suite['errors'])) {
            foreach ($suite['errors'] as $error) {
                $html .= '<div class="error">';
                $html .= '<strong>Error in ' . $error['test'] . ':</strong><br>';
                $html .= htmlspecialchars($error['error']);
                $html .= '</div>';
            }
        }

        $html .= '</div>';
    }

    // Global errors
    if (!empty($test_results['errors'])) {
        $html .= '<div class="suite">';
        $html .= '<div class="suite-header">Global Errors</div>';
        foreach ($test_results['errors'] as $error) {
            $html .= '<div class="error">' . htmlspecialchars($error) . '</div>';
        }
        $html .= '</div>';
    }

    $html .= '</body></html>';

    return $html;
}

/**
 * Generate text report
 */
function generate_text_report() {
    global $test_results;

    $output = "\n";
    $output .= "============================================\n";
    $output .= "🐘 DESK-MOLONI SYNCHRONIZATION TEST REPORT\n";
    $output .= "============================================\n\n";

    $output .= "Generated: " . date('Y-m-d H:i:s') . "\n";
    $output .= "Environment: " . (defined('ENVIRONMENT') ? ENVIRONMENT : 'development') . "\n\n";

    $output .= "SUMMARY:\n";
    $output .= "--------\n";
    $output .= "Total Tests: " . $test_results['total_tests'] . "\n";
    $output .= "Passed: " . $test_results['passed_tests'] . "\n";
    $output .= "Failed: " . $test_results['failed_tests'] . "\n";
    $output .= "Success Rate: " . number_format($test_results['success_rate'], 1) . "%\n";
    $output .= "Execution Time: " . number_format($test_results['total_execution_time'], 2) . " seconds\n";
    $output .= "Peak Memory: " . number_format($test_results['peak_memory'] / 1024 / 1024, 2) . " MB\n\n";

    foreach ($test_results['test_suites'] as $suite) {
        $output .= "TEST SUITE: " . $suite['class'] . "\n";
        $output .= str_repeat("-", strlen("TEST SUITE: " . $suite['class'])) . "\n";

        foreach ($suite['tests'] as $test) {
            $status_symbol = $test['status'] === 'PASSED' ? '✓' : '✗';
            $output .= "{$status_symbol} {$test['name']} ({$test['status']})";
            if ($test['execution_time'] > 1.0) {
                $output .= " [SLOW: " . number_format($test['execution_time'], 2) . "s]";
            }
            $output .= "\n";

            if ($test['error']) {
                $output .= " Error: " . $test['error'] . "\n";
            }
        }
        $output .= "\n";
    }

    if (!empty($test_results['errors'])) {
        $output .= "GLOBAL ERRORS:\n";
        $output .= "--------------\n";
        foreach ($test_results['errors'] as $error) {
            $output .= "• " . $error . "\n";
        }
        $output .= "\n";
    }

    return $output;
}

// Main execution
echo "🐘 Starting Desk-Moloni Synchronization System Tests...\n";
echo "========================================================\n\n";

// Define test suites
$test_suites = [];

if ($test_config['run_unit_tests']) {
    $test_suites = array_merge($test_suites, [
        'ClientSyncServiceTest' => __DIR__ . '/ClientSyncServiceTest.php',
        'QueueProcessorTest' => __DIR__ . '/QueueProcessorTest.php'
    ]);
}

if ($test_config['run_integration_tests']) {
    $test_suites['IntegrationTest'] = __DIR__ . '/IntegrationTest.php';
}
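
// Adding another suite is just another class-name => file mapping, for example
// (hypothetical class and file name):
//   $test_suites['TicketSyncServiceTest'] = __DIR__ . '/TicketSyncServiceTest.php';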

// Execute test suites
foreach ($test_suites as $class_name => $test_file) {
    run_test_class($class_name, $test_file);
}

// Generate and output report
echo "\n" . str_repeat("=", 50) . "\n";
echo "GENERATING TEST REPORT...\n";
echo str_repeat("=", 50) . "\n";

$report = generate_test_report($test_config['output_format']);

if ($test_config['output_format'] === 'html') {
    $report_file = __DIR__ . '/test_report_' . date('Y-m-d_H-i-s') . '.html';
    file_put_contents($report_file, $report);
    echo "HTML report saved to: {$report_file}\n";
}
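
// The report is also printed to stdout regardless of format; for HTML this means the
// raw markup is echoed in addition to being saved above.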
echo $report;

// Exit with appropriate code
$exit_code = $test_results['failed_tests'] > 0 ? 1 : 0;
echo "\nTest execution completed with exit code: {$exit_code}\n";

if ($exit_code === 0) {
    echo "🎉 All tests passed!\n";
} else {
    echo "❌ Some tests failed. Please review the results above.\n";
}

exit($exit_code);