<?php
/**
 * Descomplicar® Crescimento Digital
 * https://descomplicar.pt
 *
 * Desk-Moloni synchronization system test runner.
 */

// Test runner configuration
$test_config = [
    'run_unit_tests' => true,
    'run_integration_tests' => true,
    'run_performance_tests' => true,
    'generate_coverage' => false, // Set to true if Xdebug is available
    'output_format' => 'html', // html, json, text
    'detailed_output' => true
];

// Test results storage
$test_results = [
    'start_time' => microtime(true),
    'total_tests' => 0,
    'passed_tests' => 0,
    'failed_tests' => 0,
    'skipped_tests' => 0,
    'test_suites' => [],
    'errors' => [],
    'warnings' => [],
    'performance_metrics' => [],
    'memory_usage' => []
];

/**
 * Execute a test class and capture results.
 *
 * Loads the test file, discovers public methods prefixed with "test" via
 * reflection, runs each with optional setUp()/tearDown() hooks, and records
 * timing, memory usage and error details in the global $test_results array.
 */
function run_test_class($class_name, $test_file) {
    global $test_results;

    $suite_start = microtime(true);
    $suite_results = [
        'class' => $class_name,
        'file' => $test_file,
        'tests' => [],
        'passed' => 0,
        'failed' => 0,
        'skipped' => 0,
        'execution_time' => 0,
        'memory_used' => 0,
        'errors' => []
    ];

    try {
        // Load test file
        if (!file_exists($test_file)) {
            throw new Exception("Test file not found: {$test_file}");
        }

        require_once $test_file;

        if (!class_exists($class_name)) {
            throw new Exception("Test class not found: {$class_name}");
        }

        // Create test instance
        $test_instance = new $class_name();
        $reflection = new ReflectionClass($class_name);

        // Get all test methods (public methods whose name starts with "test")
        $methods = $reflection->getMethods(ReflectionMethod::IS_PUBLIC);
        $test_methods = array_filter($methods, function($method) {
            return strpos($method->getName(), 'test') === 0;
        });

        echo "Running {$class_name} (" . count($test_methods) . " tests)...\n";

        foreach ($test_methods as $method) {
            $test_name = $method->getName();
            $test_start = microtime(true);
            $test_memory_start = memory_get_usage(true);

            try {
                // Setup
                if (method_exists($test_instance, 'setUp')) {
                    $test_instance->setUp();
                }

                // Execute test
                $method->invoke($test_instance);

                // Test passed
                $suite_results['passed']++;
                $test_results['passed_tests']++;
                $status = 'PASSED';
                $error = null;
            } catch (Exception $e) {
                // Test failed
                $suite_results['failed']++;
                $test_results['failed_tests']++;
                $status = 'FAILED';
                $error = $e->getMessage();

                $suite_results['errors'][] = [
                    'test' => $test_name,
                    'error' => $error,
                    'trace' => $e->getTraceAsString()
                ];
            } finally {
                // Teardown
                if (method_exists($test_instance, 'tearDown')) {
                    try {
                        $test_instance->tearDown();
                    } catch (Exception $e) {
                        // Teardown error
                        $test_results['warnings'][] = "Teardown error in {$test_name}: " . $e->getMessage();
                    }
                }
            }

            $test_execution_time = microtime(true) - $test_start;
            $test_memory_used = memory_get_usage(true) - $test_memory_start;

            $suite_results['tests'][] = [
                'name' => $test_name,
                'status' => $status,
                'execution_time' => $test_execution_time,
                'memory_used' => $test_memory_used,
                'error' => $error
            ];

            $test_results['total_tests']++;

            // Progress output
            echo "  {$test_name}: {$status}";
            if ($test_execution_time > 1.0) {
                echo " (slow: " . number_format($test_execution_time, 2) . "s)";
            }
            echo "\n";
        }
    } catch (Exception $e) {
        $suite_results['errors'][] = [
            'test' => 'Suite Setup',
            'error' => $e->getMessage(),
            'trace' => $e->getTraceAsString()
        ];
        $test_results['errors'][] = "Error in {$class_name}: " . $e->getMessage();
    }

    $suite_results['execution_time'] = microtime(true) - $suite_start;
    $suite_results['memory_used'] = memory_get_peak_usage(true);
    $test_results['test_suites'][] = $suite_results;

    echo "  Completed in " . number_format($suite_results['execution_time'], 2) . "s\n\n";
}
/**
 * Generate test report
 */
function generate_test_report($format = 'html') {
    global $test_results;

    $test_results['end_time'] = microtime(true);
    $test_results['total_execution_time'] = $test_results['end_time'] - $test_results['start_time'];
    $test_results['peak_memory'] = memory_get_peak_usage(true);
    $test_results['success_rate'] = $test_results['total_tests'] > 0
        ? ($test_results['passed_tests'] / $test_results['total_tests']) * 100
        : 0;

    switch ($format) {
        case 'html':
            return generate_html_report();
        case 'json':
            return json_encode($test_results, JSON_PRETTY_PRINT);
        case 'text':
        default:
            return generate_text_report();
    }
}

/**
 * Generate HTML report
 */
function generate_html_report() {
    global $test_results;

    $html = '<!DOCTYPE html><html><head><meta charset="utf-8"><title>Desk-Moloni Test Report</title></head><body>';

    // Report header
    $html .= '<div class="header">';
    $html .= '<h1>🐘 Desk-Moloni Synchronization System Test Report</h1>';
    $html .= '<p>Generated on: ' . date('Y-m-d H:i:s') . '</p>';
    $html .= '<p>Environment: ' . (defined('ENVIRONMENT') ? ENVIRONMENT : 'development') . '</p>';
    $html .= '</div>';

    // Summary metrics
    $html .= '<div class="summary">';

    $html .= '<div class="metric">';
    $html .= '<div class="value">' . $test_results['total_tests'] . '</div>';
    $html .= '<div class="label">Total Tests</div>';
    $html .= '</div>';

    $html .= '<div class="metric">';
    $html .= '<div class="value">' . $test_results['passed_tests'] . '</div>';
    $html .= '<div class="label">Passed</div>';
    $html .= '</div>';

    $html .= '<div class="metric">';
    $html .= '<div class="value">' . $test_results['failed_tests'] . '</div>';
    $html .= '<div class="label">Failed</div>';
    $html .= '</div>';

    $html .= '<div class="metric">';
    $html .= '<div class="value">' . number_format($test_results['success_rate'], 1) . '%</div>';
    $html .= '<div class="label">Success Rate</div>';
    $html .= '</div>';

    $html .= '</div>';

    // Execution metrics
    $html .= '<div class="metrics">';
    $html .= '<h2>Performance Metrics</h2>';
    $html .= '<p>Total Execution Time: ' . number_format($test_results['total_execution_time'], 2) . ' seconds</p>';
    $html .= '<p>Peak Memory Usage: ' . number_format($test_results['peak_memory'] / 1024 / 1024, 2) . ' MB</p>';
    $html .= '</div>';

    // Test suites
    foreach ($test_results['test_suites'] as $suite) {
        $html .= '<div class="suite">';
        $html .= '<h3>';
        $html .= $suite['class'] . ' (' . count($suite['tests']) . ' tests, ';
        $html .= $suite['passed'] . ' passed, ' . $suite['failed'] . ' failed)';
        $html .= ' - ' . number_format($suite['execution_time'], 2) . 's';
        $html .= '</h3>';

        foreach ($suite['tests'] as $test) {
            $html .= '<div class="test">';
            $html .= '<strong>' . $test['name'] . '</strong> - ' . $test['status'];
            $html .= ' (' . number_format($test['execution_time'], 3) . 's)';
            if ($test['error']) {
                $html .= '<div class="error">' . htmlspecialchars($test['error']) . '</div>';
            }
            $html .= '</div>';
        }

        if (!empty($suite['errors'])) {
            foreach ($suite['errors'] as $error) {
                $html .= '<div class="error">';
                $html .= '<strong>Error in ' . $error['test'] . ':</strong> ';
                $html .= htmlspecialchars($error['error']);
                $html .= '</div>';
            }
        }

        $html .= '</div>';
    }

    // Global errors
    if (!empty($test_results['errors'])) {
        $html .= '<div class="global-errors">';
        $html .= '<h2>Global Errors</h2>';
        foreach ($test_results['errors'] as $error) {
            $html .= '<div class="error">' . htmlspecialchars($error) . '</div>';
        }
        $html .= '</div>';
    }

    $html .= '</body></html>';

    return $html;
}

/**
 * Generate text report
 */
function generate_text_report() {
    global $test_results;

    $output = "\n";
    $output .= "============================================\n";
    $output .= "🐘 DESK-MOLONI SYNCHRONIZATION TEST REPORT\n";
    $output .= "============================================\n\n";
    $output .= "Generated: " . date('Y-m-d H:i:s') . "\n";
    $output .= "Environment: " . (defined('ENVIRONMENT') ? ENVIRONMENT : 'development') . "\n\n";

    $output .= "SUMMARY:\n";
    $output .= "--------\n";
    $output .= "Total Tests: " . $test_results['total_tests'] . "\n";
    $output .= "Passed: " . $test_results['passed_tests'] . "\n";
    $output .= "Failed: " . $test_results['failed_tests'] . "\n";
    $output .= "Success Rate: " . number_format($test_results['success_rate'], 1) . "%\n";
    $output .= "Execution Time: " . number_format($test_results['total_execution_time'], 2) . " seconds\n";
    $output .= "Peak Memory: " . number_format($test_results['peak_memory'] / 1024 / 1024, 2) . " MB\n\n";

    foreach ($test_results['test_suites'] as $suite) {
        $output .= "TEST SUITE: " . $suite['class'] . "\n";
        $output .= str_repeat("-", strlen("TEST SUITE: " . $suite['class'])) . "\n";

        foreach ($suite['tests'] as $test) {
            $status_symbol = $test['status'] === 'PASSED' ? '✓' : '✗';
            $output .= "{$status_symbol} {$test['name']} ({$test['status']})";
            if ($test['execution_time'] > 1.0) {
                $output .= " [SLOW: " . number_format($test['execution_time'], 2) . "s]";
            }
            $output .= "\n";

            if ($test['error']) {
                $output .= "    Error: " . $test['error'] . "\n";
            }
        }
        $output .= "\n";
    }

    if (!empty($test_results['errors'])) {
        $output .= "GLOBAL ERRORS:\n";
        $output .= "--------------\n";
        foreach ($test_results['errors'] as $error) {
            $output .= "• " . $error . "\n";
        }
        $output .= "\n";
    }

    return $output;
}

// Main execution
echo "🐘 Starting Desk-Moloni Synchronization System Tests...\n";
echo "========================================================\n\n";

// Define test suites
$test_suites = [];

if ($test_config['run_unit_tests']) {
    $test_suites = array_merge($test_suites, [
        'ClientSyncServiceTest' => __DIR__ . '/ClientSyncServiceTest.php',
        'QueueProcessorTest' => __DIR__ . '/QueueProcessorTest.php'
    ]);
}

if ($test_config['run_integration_tests']) {
    $test_suites['IntegrationTest'] = __DIR__ . '/IntegrationTest.php';
}

// Execute test suites
foreach ($test_suites as $class_name => $test_file) {
    run_test_class($class_name, $test_file);
}

// Generate and output report
echo "\n" . str_repeat("=", 50) . "\n";
echo "GENERATING TEST REPORT...\n";
echo str_repeat("=", 50) . "\n";

$report = generate_test_report($test_config['output_format']);

if ($test_config['output_format'] === 'html') {
    $report_file = __DIR__ . '/test_report_' . date('Y-m-d_H-i-s') . '.html';
    file_put_contents($report_file, $report);
    echo "HTML report saved to: {$report_file}\n";
}

echo $report;

// Exit with appropriate code
$exit_code = $test_results['failed_tests'] > 0 ? 1 : 0;

echo "\nTest execution completed with exit code: {$exit_code}\n";

if ($exit_code === 0) {
    echo "🎉 All tests passed!\n";
} else {
    echo "❌ Some tests failed. Please review the results above.\n";
}

exit($exit_code);
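/*
 * Coverage hook (sketch, not wired in): the $test_config['generate_coverage']
 * flag above is currently unused. If Xdebug is installed (with
 * xdebug.mode=coverage on Xdebug 3), a minimal integration could look like the
 * helpers below: call desk_moloni_start_coverage() before the suites run and
 * desk_moloni_dump_coverage() after the report is generated. The function
 * names and the JSON dump format are illustrative assumptions, not part of the
 * existing runner. Being unconditional top-level declarations, they are
 * available even though they appear after exit().
 */
function desk_moloni_start_coverage() {
    if (!function_exists('xdebug_start_code_coverage')) {
        return false; // Xdebug not available; coverage silently skipped
    }
    xdebug_start_code_coverage(XDEBUG_CC_UNUSED | XDEBUG_CC_DEAD_CODE);
    return true;
}

function desk_moloni_dump_coverage($target_file) {
    if (!function_exists('xdebug_get_code_coverage')) {
        return;
    }
    // Capture the data before stopping, since stopping clears it from memory
    $coverage = xdebug_get_code_coverage();
    xdebug_stop_code_coverage();
    file_put_contents($target_file, json_encode($coverage, JSON_PRETTY_PRINT));
}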